From ec9fcf3350b8e5809a9968504d6e55ae0ab10fbd Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 30 Nov 2022 15:46:21 -0600 Subject: [PATCH 001/204] first prototype in separating `Clustering` into multiple steps --- element_array_ephys/spike_sorting/__init__.py | 0 .../spike_sorting/ecephys_spike_sorting.py | 250 ++++++++++++++++++ 2 files changed, 250 insertions(+) create mode 100644 element_array_ephys/spike_sorting/__init__.py create mode 100644 element_array_ephys/spike_sorting/ecephys_spike_sorting.py diff --git a/element_array_ephys/spike_sorting/__init__.py b/element_array_ephys/spike_sorting/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py new file mode 100644 index 00000000..1dc71e7a --- /dev/null +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -0,0 +1,250 @@ +import datajoint as dj +from element_array_ephys import get_logger +from decimal import Decimal +import json +from datetime import datetime, timedelta + +from element_interface.utils import find_full_path +from element_array_ephys.readers import spikeglx, kilosort, openephys, kilosort_triggering + +log = get_logger(__name__) + +schema = dj.schema() + +ephys = None + + +def activate(schema_name, ephys_schema_name, *, create_schema=True, create_tables=True): + """ + activate(schema_name, *, create_schema=True, create_tables=True, activated_ephys=None) + :param schema_name: schema name on the database server to activate the `spike_sorting` schema + :param ephys_schema_name: schema name of the activated ephys element for which this ephys_report schema will be downstream from + :param create_schema: when True (default), create schema in the database if it does not yet exist. + :param create_tables: when True (default), create tables in the database if they do not yet exist. 
+ (The "activation" of this ephys_report module should be evoked by one of the ephys modules only) + """ + global ephys + ephys = dj.create_virtual_module("ephys", ephys_schema_name) + schema.activate( + schema_name, + create_schema=create_schema, + create_tables=create_tables, + add_objects=ephys.__dict__, + ) + + +@schema +class KilosortPreProcessing(dj.Imported): + """A processing table to handle each clustering task. + """ + definition = """ + -> ephys.ClusteringTask + --- + params: longblob # finalized parameterset for this run + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + @property + def key_source(self): + return (ephys.ClusteringTask * ephys.ClusteringParamSet + & {'task_mode': 'trigger'} + & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")') + + def make(self, key): + """Triggers or imports clustering analysis.""" + execution_time = datetime.utcnow() + + task_mode, output_dir = (ephys.ClusteringTask & key).fetch1( + "task_mode", "clustering_output_dir" + ) + + assert task_mode == "trigger", 'Supporting "trigger" task_mode only' + + if not output_dir: + output_dir = ephys.ClusteringTask.infer_output_dir(key, relative=True, mkdir=True) + # update clustering_output_dir + ephys.ClusteringTask.update1( + {**key, "clustering_output_dir": output_dir.as_posix()} + ) + + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method, params = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method", "params") + + assert clustering_method in ("kilosort2", "kilosort2.5", "kilosort3"), 'Supporting "kilosort" clustering_method only' + + # add additional probe-recording and channels details into `params` + params = {**params, **ephys.get_recording_channels_details(key)} + params["fs"] = params["sample_rate"] + + if acq_software == "SpikeGLX": + 
spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) + spikeglx_recording = spikeglx.SpikeGLX( + spikeglx_meta_filepath.parent + ) + spikeglx_recording.validate_file("ap") + + run_kilosort = kilosort_triggering.SGLXKilosortPipeline( + npx_input_dir=spikeglx_meta_filepath.parent, + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + run_CatGT=True, + ) + run_kilosort.run_CatGT() + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # run kilosort + run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( + npx_input_dir=oe_probe.recording_info["recording_files"][0], + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + ) + run_kilosort._modules = ['depth_estimation', 'median_subtraction'] + run_kilosort.run_modules() + + self.insert1({**key, + "params": params, + "execution_time": execution_time, + "execution_duration": (datetime.utcnow() - execution_time).total_seconds() / 3600}) + + +@schema +class KilosortClustering(dj.Imported): + """A processing table to handle each clustering task. 
+ """ + definition = """ + -> KilosortPreProcessing + --- + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + def make(self, key): + execution_time = datetime.utcnow() + + output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method") + assert clustering_method in ("kilosort2", "kilosort2.5", "kilosort3"), 'Supporting "kilosort" clustering_method only' + + params = (KilosortPreProcessing & key).fetch1('params') + + if acq_software == "SpikeGLX": + spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) + spikeglx_recording = spikeglx.SpikeGLX( + spikeglx_meta_filepath.parent + ) + spikeglx_recording.validate_file("ap") + + run_kilosort = kilosort_triggering.SGLXKilosortPipeline( + npx_input_dir=spikeglx_meta_filepath.parent, + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + run_CatGT=True, + ) + run_kilosort._modules = ['kilosort_helper'] + run_kilosort._CatGT_finished = True + run_kilosort.run_modules() + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # run kilosort + run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( + npx_input_dir=oe_probe.recording_info["recording_files"][0], + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + ) + run_kilosort._modules = ['kilosort_helper'] + run_kilosort.run_modules() + + self.insert1({**key, + "execution_time": execution_time, + "execution_duration": (datetime.utcnow() - execution_time).total_seconds() / 3600}) + + +@schema +class 
KilosortPostProcessing(dj.Imported): + """A processing table to handle each clustering task. + """ + definition = """ + -> KilosortClustering + --- + modules_status: longblob # dictionary of summary status for all modules + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + def make(self, key): + execution_time = datetime.utcnow() + + output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method") + assert clustering_method in ( + "kilosort2", "kilosort2.5", "kilosort3"), 'Supporting "kilosort" clustering_method only' + + params = (KilosortPreProcessing & key).fetch1('params') + + if acq_software == "SpikeGLX": + spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) + spikeglx_recording = spikeglx.SpikeGLX( + spikeglx_meta_filepath.parent + ) + spikeglx_recording.validate_file("ap") + + run_kilosort = kilosort_triggering.SGLXKilosortPipeline( + npx_input_dir=spikeglx_meta_filepath.parent, + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + run_CatGT=True, + ) + run_kilosort._modules = ['kilosort_postprocessing', + 'noise_templates', + 'mean_waveforms', + 'quality_metrics'] + run_kilosort._CatGT_finished = True + run_kilosort.run_modules() + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # run kilosort + run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( + npx_input_dir=oe_probe.recording_info["recording_files"][0], + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + ) + 
run_kilosort._modules = ['kilosort_postprocessing', + 'noise_templates', + 'mean_waveforms', + 'quality_metrics'] + run_kilosort.run_modules() + + with open(self._modules_input_hash_fp) as f: + modules_status = json.load(f) + + self.insert1({**key, + "modules_status": modules_status, + "execution_time": execution_time, + "execution_duration": (datetime.utcnow() - execution_time).total_seconds() / 3600}) From f5724384952f801086e751d3645437bea2694604 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 5 Jan 2023 14:06:00 -0600 Subject: [PATCH 002/204] Update ecephys_spike_sorting.py --- .../spike_sorting/ecephys_spike_sorting.py | 145 ++++++++++++------ 1 file changed, 94 insertions(+), 51 deletions(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index 1dc71e7a..1592e65e 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -5,7 +5,12 @@ from datetime import datetime, timedelta from element_interface.utils import find_full_path -from element_array_ephys.readers import spikeglx, kilosort, openephys, kilosort_triggering +from element_array_ephys.readers import ( + spikeglx, + kilosort, + openephys, + kilosort_triggering, +) log = get_logger(__name__) @@ -35,8 +40,8 @@ def activate(schema_name, ephys_schema_name, *, create_schema=True, create_table @schema class KilosortPreProcessing(dj.Imported): - """A processing table to handle each clustering task. 
- """ + """A processing table to handle each clustering task.""" + definition = """ -> ephys.ClusteringTask --- @@ -47,9 +52,11 @@ class KilosortPreProcessing(dj.Imported): @property def key_source(self): - return (ephys.ClusteringTask * ephys.ClusteringParamSet - & {'task_mode': 'trigger'} - & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")') + return ( + ephys.ClusteringTask * ephys.ClusteringParamSet + & {"task_mode": "trigger"} + & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' + ) def make(self, key): """Triggers or imports clustering analysis.""" @@ -62,7 +69,9 @@ def make(self, key): assert task_mode == "trigger", 'Supporting "trigger" task_mode only' if not output_dir: - output_dir = ephys.ClusteringTask.infer_output_dir(key, relative=True, mkdir=True) + output_dir = ephys.ClusteringTask.infer_output_dir( + key, relative=True, mkdir=True + ) # update clustering_output_dir ephys.ClusteringTask.update1( {**key, "clustering_output_dir": output_dir.as_posix()} @@ -71,10 +80,14 @@ def make(self, key): kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) acq_software, clustering_method, params = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method", "params") - assert clustering_method in ("kilosort2", "kilosort2.5", "kilosort3"), 'Supporting "kilosort" clustering_method only' + assert clustering_method in ( + "kilosort2", + "kilosort2.5", + "kilosort3", + ), 'Supporting "kilosort" clustering_method only' # add additional probe-recording and channels details into `params` params = {**params, **ephys.get_recording_channels_details(key)} @@ -82,17 +95,19 @@ def make(self, key): if acq_software == "SpikeGLX": spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX( - spikeglx_meta_filepath.parent - ) + 
spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) spikeglx_recording.validate_file("ap") + run_CatGT = ( + params.pop("run_CatGT", True) + and "_tcat." not in spikeglx_meta_filepath.stem + ) run_kilosort = kilosort_triggering.SGLXKilosortPipeline( npx_input_dir=spikeglx_meta_filepath.parent, ks_output_dir=kilosort_dir, params=params, KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=True, + run_CatGT=run_CatGT, ) run_kilosort.run_CatGT() elif acq_software == "Open Ephys": @@ -107,19 +122,26 @@ def make(self, key): params=params, KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', ) - run_kilosort._modules = ['depth_estimation', 'median_subtraction'] + run_kilosort._modules = ["depth_estimation", "median_subtraction"] run_kilosort.run_modules() - self.insert1({**key, - "params": params, - "execution_time": execution_time, - "execution_duration": (datetime.utcnow() - execution_time).total_seconds() / 3600}) + self.insert1( + { + **key, + "params": params, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) @schema class KilosortClustering(dj.Imported): - """A processing table to handle each clustering task. 
- """ + """A processing table to handle each clustering task.""" + definition = """ -> KilosortPreProcessing --- @@ -134,17 +156,19 @@ def make(self, key): kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - assert clustering_method in ("kilosort2", "kilosort2.5", "kilosort3"), 'Supporting "kilosort" clustering_method only' + assert clustering_method in ( + "kilosort2", + "kilosort2.5", + "kilosort3", + ), 'Supporting "kilosort" clustering_method only' - params = (KilosortPreProcessing & key).fetch1('params') + params = (KilosortPreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX( - spikeglx_meta_filepath.parent - ) + spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) spikeglx_recording.validate_file("ap") run_kilosort = kilosort_triggering.SGLXKilosortPipeline( @@ -154,7 +178,7 @@ def make(self, key): KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', run_CatGT=True, ) - run_kilosort._modules = ['kilosort_helper'] + run_kilosort._modules = ["kilosort_helper"] run_kilosort._CatGT_finished = True run_kilosort.run_modules() elif acq_software == "Open Ephys": @@ -169,18 +193,25 @@ def make(self, key): params=params, KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', ) - run_kilosort._modules = ['kilosort_helper'] + run_kilosort._modules = ["kilosort_helper"] run_kilosort.run_modules() - self.insert1({**key, - "execution_time": execution_time, - "execution_duration": (datetime.utcnow() - execution_time).total_seconds() / 3600}) + self.insert1( + { + **key, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - 
execution_time + ).total_seconds() + / 3600, + } + ) @schema class KilosortPostProcessing(dj.Imported): - """A processing table to handle each clustering task. - """ + """A processing table to handle each clustering task.""" + definition = """ -> KilosortClustering --- @@ -196,18 +227,19 @@ def make(self, key): kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") assert clustering_method in ( - "kilosort2", "kilosort2.5", "kilosort3"), 'Supporting "kilosort" clustering_method only' + "kilosort2", + "kilosort2.5", + "kilosort3", + ), 'Supporting "kilosort" clustering_method only' - params = (KilosortPreProcessing & key).fetch1('params') + params = (KilosortPreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX( - spikeglx_meta_filepath.parent - ) + spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) spikeglx_recording.validate_file("ap") run_kilosort = kilosort_triggering.SGLXKilosortPipeline( @@ -217,10 +249,12 @@ def make(self, key): KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', run_CatGT=True, ) - run_kilosort._modules = ['kilosort_postprocessing', - 'noise_templates', - 'mean_waveforms', - 'quality_metrics'] + run_kilosort._modules = [ + "kilosort_postprocessing", + "noise_templates", + "mean_waveforms", + "quality_metrics", + ] run_kilosort._CatGT_finished = True run_kilosort.run_modules() elif acq_software == "Open Ephys": @@ -235,16 +269,25 @@ def make(self, key): params=params, KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', ) - run_kilosort._modules = ['kilosort_postprocessing', - 'noise_templates', - 'mean_waveforms', - 
'quality_metrics'] + run_kilosort._modules = [ + "kilosort_postprocessing", + "noise_templates", + "mean_waveforms", + "quality_metrics", + ] run_kilosort.run_modules() with open(self._modules_input_hash_fp) as f: modules_status = json.load(f) - self.insert1({**key, - "modules_status": modules_status, - "execution_time": execution_time, - "execution_duration": (datetime.utcnow() - execution_time).total_seconds() / 3600}) + self.insert1( + { + **key, + "modules_status": modules_status, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) From edf1578b45425410c6cb53e5777866afa5f04f98 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 5 Jan 2023 14:07:53 -0600 Subject: [PATCH 003/204] Update ecephys_spike_sorting.py --- element_array_ephys/spike_sorting/ecephys_spike_sorting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index 1592e65e..eb02c251 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -98,7 +98,7 @@ def make(self, key): spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) spikeglx_recording.validate_file("ap") run_CatGT = ( - params.pop("run_CatGT", True) + params.get("run_CatGT", True) and "_tcat." 
not in spikeglx_meta_filepath.stem ) From 7e267c57571c3396ecdc60d159db0245326e4047 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 5 Jan 2023 17:15:51 -0600 Subject: [PATCH 004/204] fix typo --- element_array_ephys/ephys_no_curation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 943d3354..9414c49e 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -799,7 +799,7 @@ class Clustering(dj.Imported): Attributes: ClusteringTask (foreign key): ClusteringTask primary key. clustering_time (datetime): Time when clustering results are generated. - package_version (varchar(16) ): Package version used for a clustering analysis. + package_version (varchar(16): Package version used for a clustering analysis. """ definition = """ From 6e7ddf15966d51474455efdd758e99e93850b6b7 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 6 Jan 2023 10:23:28 -0600 Subject: [PATCH 005/204] Update ecephys_spike_sorting.py --- .../spike_sorting/ecephys_spike_sorting.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index eb02c251..ed6d699e 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -19,17 +19,23 @@ ephys = None -def activate(schema_name, ephys_schema_name, *, create_schema=True, create_tables=True): +def activate( + schema_name, + *, + ephys_module, + create_schema=True, + create_tables=True, +): """ activate(schema_name, *, create_schema=True, create_tables=True, activated_ephys=None) :param schema_name: schema name on the database server to activate the `spike_sorting` schema - :param ephys_schema_name: schema name of the activated ephys element for which this ephys_report schema 
will be downstream from + :param ephys_module: the activated ephys element for which this ephys_report schema will be downstream from :param create_schema: when True (default), create schema in the database if it does not yet exist. :param create_tables: when True (default), create tables in the database if they do not yet exist. (The "activation" of this ephys_report module should be evoked by one of the ephys modules only) """ global ephys - ephys = dj.create_virtual_module("ephys", ephys_schema_name) + ephys = ephys_module schema.activate( schema_name, create_schema=create_schema, From 7fd9bb4368208e12a809b49e402190e3d34eaa07 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 6 Jan 2023 11:15:43 -0600 Subject: [PATCH 006/204] improve log messages --- element_array_ephys/readers/kilosort_triggering.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/readers/kilosort_triggering.py b/element_array_ephys/readers/kilosort_triggering.py index 8e4b80ff..5d76c7af 100644 --- a/element_array_ephys/readers/kilosort_triggering.py +++ b/element_array_ephys/readers/kilosort_triggering.py @@ -21,13 +21,13 @@ get_noise_channels, ) except Exception as e: - print(f'Error in loading "ecephys_spike_sorting" package - {str(e)}') + print(f'Warning: Failed loading "ecephys_spike_sorting" package - {str(e)}') # import pykilosort package try: import pykilosort except Exception as e: - print(f'Error in loading "pykilosort" package - {str(e)}') + print(f'Warning: Failed loading "pykilosort" package - {str(e)}') class SGLXKilosortPipeline: From 26a56e72f76afa24833f275c01df0606e9d24ec2 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 6 Jan 2023 11:58:01 -0600 Subject: [PATCH 007/204] fix key_source --- element_array_ephys/spike_sorting/ecephys_spike_sorting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py 
b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index ed6d699e..d7f7865a 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -62,7 +62,7 @@ def key_source(self): ephys.ClusteringTask * ephys.ClusteringParamSet & {"task_mode": "trigger"} & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' - ) + ) - ephys.Clustering def make(self, key): """Triggers or imports clustering analysis.""" From 654bc522ce8bba576b2793924f39d1a97416bb0f Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 18 Jan 2023 11:58:13 -0600 Subject: [PATCH 008/204] Update ecephys_spike_sorting.py --- element_array_ephys/spike_sorting/ecephys_spike_sorting.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index d7f7865a..0cf4bea8 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -297,3 +297,6 @@ def make(self, key): / 3600, } ) + + # all finished, insert this `key` into ephys.Clustering + ephys.Clustering.insert1({**key, "clustering_time": datetime.utcnow()}) From f75e14f13a2f413b2c34be401652ade70ce11a94 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 19 Jan 2023 15:43:15 -0600 Subject: [PATCH 009/204] bugfix --- element_array_ephys/spike_sorting/ecephys_spike_sorting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index 0cf4bea8..3eca46b9 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -283,7 +283,7 @@ def make(self, key): ] run_kilosort.run_modules() - with open(self._modules_input_hash_fp) as f: + with 
open(run_kilosort._modules_input_hash_fp) as f: modules_status = json.load(f) self.insert1( From a32d1d25b895bc1c05f4149f920b206b07646aae Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 19 Jan 2023 16:23:37 -0600 Subject: [PATCH 010/204] bugfix --- element_array_ephys/spike_sorting/ecephys_spike_sorting.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index 3eca46b9..d33a3752 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -299,4 +299,6 @@ def make(self, key): ) # all finished, insert this `key` into ephys.Clustering - ephys.Clustering.insert1({**key, "clustering_time": datetime.utcnow()}) + ephys.Clustering.insert1( + {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True + ) From 3bea7755245905dc59271a248514caad004c7f10 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 20 Jan 2023 16:50:35 -0600 Subject: [PATCH 011/204] Update kilosort_triggering.py --- element_array_ephys/readers/kilosort_triggering.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/element_array_ephys/readers/kilosort_triggering.py b/element_array_ephys/readers/kilosort_triggering.py index 7f30cac4..4e831d1b 100644 --- a/element_array_ephys/readers/kilosort_triggering.py +++ b/element_array_ephys/readers/kilosort_triggering.py @@ -777,8 +777,7 @@ def _write_channel_map_file( # channels to exclude mask = get_noise_channels(ap_band_file, channel_count, sample_rate, bit_volts) - bad_channel_ind = np.where(mask is False)[0] - connected[bad_channel_ind] = 0 + connected = np.where(mask is False, 0, connected) mdict = { "chanMap": chanMap, From 4f955b32e8049c2961e9c916029b2d27f008476a Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Mon, 23 Jan 2023 10:53:46 -0600 Subject: [PATCH 012/204] fix docstring --- 
element_array_ephys/spike_sorting/ecephys_spike_sorting.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index d33a3752..cec8f7ac 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -29,10 +29,9 @@ def activate( """ activate(schema_name, *, create_schema=True, create_tables=True, activated_ephys=None) :param schema_name: schema name on the database server to activate the `spike_sorting` schema - :param ephys_module: the activated ephys element for which this ephys_report schema will be downstream from + :param ephys_module: the activated ephys element for which this `spike_sorting` schema will be downstream from :param create_schema: when True (default), create schema in the database if it does not yet exist. :param create_tables: when True (default), create tables in the database if they do not yet exist. - (The "activation" of this ephys_report module should be evoked by one of the ephys modules only) """ global ephys ephys = ephys_module From 53854d0a986d564c8e75295f83da4dbd5446d92b Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Mon, 23 Jan 2023 11:05:47 -0600 Subject: [PATCH 013/204] added description --- .../spike_sorting/ecephys_spike_sorting.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index cec8f7ac..d779d0c0 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -1,3 +1,26 @@ +""" +The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the +"ecephys_spike_sorting" pipeline. 
+The "ecephys_spike_sorting" was originally developed by the Allen Institute (https://github.com/AllenInstitute/ecephys_spike_sorting) for Neuropixels data acquired with Open Ephys acquisition system. +Then forked by Jennifer Colonell from the Janelia Research Campus (https://github.com/jenniferColonell/ecephys_spike_sorting) to support SpikeGLX acquisition system. + +At DataJoint, we fork from Jennifer's fork and implemented a version that supports both Open Ephys and Spike GLX. +https://github.com/datajoint-company/ecephys_spike_sorting + +The follow pipeline features three tables: +1. KilosortPreProcessing - for preprocessing steps (no GPU required) + - median_subtraction for Open Ephys + - or the CatGT step for SpikeGLX +2. KilosortClustering - kilosort (MATLAB) - requires GPU + - supports kilosort 2.0, 2.5 or 3.0 (https://github.com/MouseLand/Kilosort.git) +3. KilosortPostProcessing - for postprocessing steps (no GPU required) + - kilosort_postprocessing + - noise_templates + - mean_waveforms + - quality_metrics +""" + + import datajoint as dj from element_array_ephys import get_logger from decimal import Decimal From fe9955ca4269b86958541440f1587bdecc5b0c1b Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 25 Jan 2023 15:33:08 -0600 Subject: [PATCH 014/204] refactor `_supported_kilosort_versions` --- .../spike_sorting/ecephys_spike_sorting.py | 24 +++++++------------ 1 file changed, 9 insertions(+), 15 deletions(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index d779d0c0..fca4e452 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -41,6 +41,12 @@ ephys = None +_supported_kilosort_versions = [ + "kilosort2", + "kilosort2.5", + "kilosort3", +] + def activate( schema_name, @@ -111,11 +117,9 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * 
ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method", "params") - assert clustering_method in ( - "kilosort2", - "kilosort2.5", - "kilosort3", - ), 'Supporting "kilosort" clustering_method only' + assert ( + clustering_method in _supported_kilosort_versions + ), f'Clustering_method "{clustering_method}" is not supported' # add additional probe-recording and channels details into `params` params = {**params, **ephys.get_recording_channels_details(key)} @@ -186,11 +190,6 @@ def make(self, key): acq_software, clustering_method = ( ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - assert clustering_method in ( - "kilosort2", - "kilosort2.5", - "kilosort3", - ), 'Supporting "kilosort" clustering_method only' params = (KilosortPreProcessing & key).fetch1("params") @@ -257,11 +256,6 @@ def make(self, key): acq_software, clustering_method = ( ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - assert clustering_method in ( - "kilosort2", - "kilosort2.5", - "kilosort3", - ), 'Supporting "kilosort" clustering_method only' params = (KilosortPreProcessing & key).fetch1("params") From aea325d9bb6a975fd4e2c382f313b209a5be0017 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 25 Jan 2023 17:27:00 -0600 Subject: [PATCH 015/204] remove unused imports --- element_array_ephys/spike_sorting/ecephys_spike_sorting.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index fca4e452..4de349eb 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -30,8 +30,6 @@ from element_interface.utils import find_full_path from element_array_ephys.readers import ( spikeglx, - kilosort, - openephys, kilosort_triggering, ) From 
4f648cc8054a6e237b971ab6c8cb4b88c6b0c568 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Wed, 1 Feb 2023 18:37:39 -0600 Subject: [PATCH 016/204] add new file for spike interface modularized clustering approach --- .../spike_sorting/si_clustering.py | 534 ++++++++++++++++++ 1 file changed, 534 insertions(+) create mode 100644 element_array_ephys/spike_sorting/si_clustering.py diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py new file mode 100644 index 00000000..32384d01 --- /dev/null +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -0,0 +1,534 @@ +""" +The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the +"ecephys_spike_sorting" pipeline. +The "ecephys_spike_sorting" was originally developed by the Allen Institute (https://github.com/AllenInstitute/ecephys_spike_sorting) for Neuropixels data acquired with Open Ephys acquisition system. +Then forked by Jennifer Colonell from the Janelia Research Campus (https://github.com/jenniferColonell/ecephys_spike_sorting) to support SpikeGLX acquisition system. + +At DataJoint, we fork from Jennifer's fork and implemented a version that supports both Open Ephys and Spike GLX. +https://github.com/datajoint-company/ecephys_spike_sorting + +The follow pipeline features intermediary tables: +1. KilosortPreProcessing - for preprocessing steps (no GPU required) + - median_subtraction for Open Ephys + - or the CatGT step for SpikeGLX +2. KilosortClustering - kilosort (MATLAB) - requires GPU + - supports kilosort 2.0, 2.5 or 3.0 (https://github.com/MouseLand/Kilosort.git) +3. 
KilosortPostProcessing - for postprocessing steps (no GPU required) + - kilosort_postprocessing + - noise_templates + - mean_waveforms + - quality_metrics + + +""" +import datajoint as dj +import os +from element_array_ephys import get_logger +from decimal import Decimal +import json +import numpy as np +from datetime import datetime, timedelta + +from element_interface.utils import find_full_path +from element_array_ephys.readers import ( + spikeglx, + kilosort_triggering, +) +import element_array_ephys.ephys_no_curation as ephys +import element_array_ephys.probe as probe +# from element_array_ephys.ephys_no_curation import ( +# get_ephys_root_data_dir, +# get_session_directory, +# get_openephys_filepath, +# get_spikeglx_meta_filepath, +# get_recording_channels_details, +# ) +import spikeinterface as si +import spikeinterface.extractors as se +import spikeinterface.sorters as ss +import spikeinterface.comparison as sc +import spikeinterface.widgets as sw +import spikeinterface.preprocessing as sip +import probeinterface as pi + +log = get_logger(__name__) + +schema = dj.schema() + +ephys = None + +_supported_kilosort_versions = [ + "kilosort2", + "kilosort2.5", + "kilosort3", +] + + +def activate( + schema_name, + *, + ephys_module, + create_schema=True, + create_tables=True, +): + """ + activate(schema_name, *, create_schema=True, create_tables=True, activated_ephys=None) + :param schema_name: schema name on the database server to activate the `spike_sorting` schema + :param ephys_module: the activated ephys element for which this `spike_sorting` schema will be downstream from + :param create_schema: when True (default), create schema in the database if it does not yet exist. + :param create_tables: when True (default), create tables in the database if they do not yet exist. 
+ """ + global ephys + ephys = ephys_module + schema.activate( + schema_name, + create_schema=create_schema, + create_tables=create_tables, + add_objects=ephys.__dict__, + ) + +@schema +class SI_preprocessing(dj.Imported): + """A table to handle preprocessing of each clustering task.""" + + definition = """ + -> ephys.ClusteringTask + --- + params: longblob # finalized parameterset for this run + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + @property + def key_source(self): + return ( + ephys.ClusteringTask * ephys.ClusteringParamSet + & {"task_mode": "trigger"} + & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' + ) - ephys.Clustering + def make(self, key): + """Triggers or imports clustering analysis.""" + execution_time = datetime.utcnow() + + task_mode, output_dir = (ephys.ClusteringTask & key).fetch1( + "task_mode", "clustering_output_dir" + ) + + assert task_mode == "trigger", 'Supporting "trigger" task_mode only' + + if not output_dir: + output_dir = ephys.ClusteringTask.infer_output_dir( + key, relative=True, mkdir=True + ) + # update clustering_output_dir + ephys.ClusteringTask.update1( + {**key, "clustering_output_dir": output_dir.as_posix()} + ) + + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method, params = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method", "params") + + assert ( + clustering_method in _supported_kilosort_versions + ), f'Clustering_method "{clustering_method}" is not supported' + + # add additional probe-recording and channels details into `params` + params = {**params, **ephys.get_recording_channels_details(key)} + params["fs"] = params["sample_rate"] + + if acq_software == "SpikeGLX": + sglx_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) + sglx_filepath = 
ephys.get_spikeglx_meta_filepath(key) + stream_name = os.path.split(sglx_filepath)[1] + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # Create SI recording extractor object + # sglx_si_recording = se.SpikeGLXRecordingExtractor(folder_path=sglx_full_path, stream_name=stream_name) + sglx_si_recording = se.read_spikeglx(folder_path=sglx_full_path, stream_name=stream_name) + electrode_query = (probe.ProbeType.Electrode + * probe.ElectrodeConfig.Electrode + * ephys.EphysRecording & key) + + xy_coords = [list(i) for i in zip(electrode_query.fetch('x_coord'),electrode_query.fetch('y_coord'))] + channels_details = ephys.get_recording_channels_details(key) + + # Create SI probe object + probe = pi.Probe(ndim=2, si_units='um') + probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) + probe.create_auto_shape(probe_type='tip') + channel_indices = np.arange(channels_details['num_channels']) + probe.set_device_channel_indices(channel_indices) + oe_si_recording.set_probe(probe=probe) + + # run preprocessing and save results to output folder + sglx_si_recording_filtered = sip.bandpass_filter(sglx_si_recording, freq_min=300, freq_max=6000) + sglx_recording_cmr = sip.common_reference(sglx_si_recording_filtered, reference="global", operator="median") + sglx_recording_cmr.save_to_folder('sglx_recording_cmr', kilosort_dir) + + + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + oe_full_path = find_full_path(get_ephys_root_data_dir(),get_session_directory(key)) + oe_filepath = get_openephys_filepath(key) + stream_name = os.path.split(oe_filepath)[1] + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # Create SI recording extractor object + # oe_si_recording = se.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) + oe_si_recording = se.read_openephys(folder_path=oe_full_path, stream_name=stream_name) + electrode_query = (probe.ProbeType.Electrode + * 
probe.ElectrodeConfig.Electrode + * ephys.EphysRecording & key) + + xy_coords = [list(i) for i in zip(electrode_query.fetch('x_coord'),electrode_query.fetch('y_coord'))] + channels_details = get_recording_channels_details(key) + + # Create SI probe object + probe = pi.Probe(ndim=2, si_units='um') + probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) + probe.create_auto_shape(probe_type='tip') + channel_indices = np.arange(channels_details['num_channels']) + probe.set_device_channel_indices(channel_indices) + oe_si_recording.set_probe(probe=probe) + + # run preprocessing and save results to output folder + oe_si_recording_filtered = sip.bandpass_filter(oe_si_recording, freq_min=300, freq_max=6000) + oe_recording_cmr = sip.common_reference(oe_si_recording_filtered, reference="global", operator="median") + oe_recording_cmr.save_to_folder('oe_recording_cmr', kilosort_dir) + + self.insert1( + { + **key, + "params": params, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) +@schema +class SI_KilosortClustering(dj.Imported): + """A processing table to handle each clustering task.""" + + definition = """ + -> KilosortPreProcessing + --- + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + def make(self, key): + execution_time = datetime.utcnow() + + output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method") + + params = (KilosortPreProcessing & key).fetch1("params") + + if acq_software == "SpikeGLX": + sglx_probe = ephys.get_openephys_probe_data(key) + oe_si_recording = se.load_from_folder + assert 
len(oe_probe.recording_info["recording_files"]) == 1 + if clustering_method.startswith('kilosort2.5'): + sorter_name = "kilosort2_5" + else: + sorter_name = clustering_method + sorting_kilosort = si.run_sorter( + sorter_name = sorter_name, + recording = oe_si_recording, + output_folder = kilosort_dir, + docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", + **params + ) + sorting_kilosort.save_to_folder('sorting_kilosort', kilosort_dir) + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + oe_si_recording = se.load_from_folder + assert len(oe_probe.recording_info["recording_files"]) == 1 + if clustering_method.startswith('kilosort2.5'): + sorter_name = "kilosort2_5" + else: + sorter_name = clustering_method + sorting_kilosort = si.run_sorter( + sorter_name = sorter_name, + recording = oe_si_recording, + output_folder = kilosort_dir, + docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", + **params + ) + sorting_kilosort.save_to_folder('sorting_kilosort', kilosort_dir) + + self.insert1( + { + **key, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) + + + + + +@schema +class KilosortPreProcessing(dj.Imported): + """A processing table to handle each clustering task.""" + + definition = """ + -> ephys.ClusteringTask + --- + params: longblob # finalized parameterset for this run + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + @property + def key_source(self): + return ( + ephys.ClusteringTask * ephys.ClusteringParamSet + & {"task_mode": "trigger"} + & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' + ) - ephys.Clustering + + def make(self, key): + """Triggers or imports clustering analysis.""" + execution_time = datetime.utcnow() + + task_mode, output_dir = (ephys.ClusteringTask & key).fetch1( + "task_mode", 
"clustering_output_dir" + ) + + assert task_mode == "trigger", 'Supporting "trigger" task_mode only' + + if not output_dir: + output_dir = ephys.ClusteringTask.infer_output_dir( + key, relative=True, mkdir=True + ) + # update clustering_output_dir + ephys.ClusteringTask.update1( + {**key, "clustering_output_dir": output_dir.as_posix()} + ) + + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method, params = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method", "params") + + assert ( + clustering_method in _supported_kilosort_versions + ), f'Clustering_method "{clustering_method}" is not supported' + + # add additional probe-recording and channels details into `params` + params = {**params, **ephys.get_recording_channels_details(key)} + params["fs"] = params["sample_rate"] + + + + + if acq_software == "SpikeGLX": + spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) + spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) + spikeglx_recording.validate_file("ap") + run_CatGT = ( + params.get("run_CatGT", True) + and "_tcat." 
not in spikeglx_meta_filepath.stem + ) + + run_kilosort = kilosort_triggering.SGLXKilosortPipeline( + npx_input_dir=spikeglx_meta_filepath.parent, + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + run_CatGT=run_CatGT, + ) + run_kilosort.run_CatGT() + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # run kilosort + run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( + npx_input_dir=oe_probe.recording_info["recording_files"][0], + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + ) + run_kilosort._modules = ["depth_estimation", "median_subtraction"] + run_kilosort.run_modules() + + self.insert1( + { + **key, + "params": params, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) + + +@schema +class KilosortClustering(dj.Imported): + """A processing table to handle each clustering task.""" + + definition = """ + -> KilosortPreProcessing + --- + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + def make(self, key): + execution_time = datetime.utcnow() + + output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method") + + params = (KilosortPreProcessing & key).fetch1("params") + + if acq_software == "SpikeGLX": + spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) + spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) + spikeglx_recording.validate_file("ap") + + run_kilosort = 
kilosort_triggering.SGLXKilosortPipeline( + npx_input_dir=spikeglx_meta_filepath.parent, + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + run_CatGT=True, + ) + run_kilosort._modules = ["kilosort_helper"] + run_kilosort._CatGT_finished = True + run_kilosort.run_modules() + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # run kilosort + run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( + npx_input_dir=oe_probe.recording_info["recording_files"][0], + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + ) + run_kilosort._modules = ["kilosort_helper"] + run_kilosort.run_modules() + + self.insert1( + { + **key, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) + + +@schema +class KilosortPostProcessing(dj.Imported): + """A processing table to handle each clustering task.""" + + definition = """ + -> KilosortClustering + --- + modules_status: longblob # dictionary of summary status for all modules + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + def make(self, key): + execution_time = datetime.utcnow() + + output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method") + + params = (KilosortPreProcessing & key).fetch1("params") + + if acq_software == "SpikeGLX": + spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) + spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) + 
spikeglx_recording.validate_file("ap") + + run_kilosort = kilosort_triggering.SGLXKilosortPipeline( + npx_input_dir=spikeglx_meta_filepath.parent, + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + run_CatGT=True, + ) + run_kilosort._modules = [ + "kilosort_postprocessing", + "noise_templates", + "mean_waveforms", + "quality_metrics", + ] + run_kilosort._CatGT_finished = True + run_kilosort.run_modules() + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + + assert len(oe_probe.recording_info["recording_files"]) == 1 + + # run kilosort + run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( + npx_input_dir=oe_probe.recording_info["recording_files"][0], + ks_output_dir=kilosort_dir, + params=params, + KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', + ) + run_kilosort._modules = [ + "kilosort_postprocessing", + "noise_templates", + "mean_waveforms", + "quality_metrics", + ] + run_kilosort.run_modules() + + with open(run_kilosort._modules_input_hash_fp) as f: + modules_status = json.load(f) + + self.insert1( + { + **key, + "modules_status": modules_status, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) + + # all finished, insert this `key` into ephys.Clustering + ephys.Clustering.insert1( + {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True + ) From 60091acad42b3081f3fbea53301d15993bc2e175 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 2023 01:32:18 -0600 Subject: [PATCH 017/204] add spike interface clustering and post processing modules --- .../spike_sorting/si_clustering.py | 123 ++++++++++++++++-- 1 file changed, 111 insertions(+), 12 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 32384d01..9ddddb75 100644 --- 
a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -44,7 +44,9 @@ # get_recording_channels_details, # ) import spikeinterface as si +import spikeinterface.core as sic import spikeinterface.extractors as se +import spikeinterface.exporters as sie import spikeinterface.sorters as ss import spikeinterface.comparison as sc import spikeinterface.widgets as sw @@ -88,7 +90,7 @@ def activate( ) @schema -class SI_preprocessing(dj.Imported): +class SI_PreProcessing(dj.Imported): """A table to handle preprocessing of each clustering task.""" definition = """ @@ -172,8 +174,8 @@ def make(self, key): elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) - oe_full_path = find_full_path(get_ephys_root_data_dir(),get_session_directory(key)) - oe_filepath = get_openephys_filepath(key) + oe_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) + oe_filepath = ephys.get_openephys_filepath(key) stream_name = os.path.split(oe_filepath)[1] assert len(oe_probe.recording_info["recording_files"]) == 1 @@ -186,7 +188,7 @@ def make(self, key): * ephys.EphysRecording & key) xy_coords = [list(i) for i in zip(electrode_query.fetch('x_coord'),electrode_query.fetch('y_coord'))] - channels_details = get_recording_channels_details(key) + channels_details = ephys.get_recording_channels_details(key) # Create SI probe object probe = pi.Probe(ndim=2, si_units='um') @@ -199,7 +201,8 @@ def make(self, key): # run preprocessing and save results to output folder oe_si_recording_filtered = sip.bandpass_filter(oe_si_recording, freq_min=300, freq_max=6000) oe_recording_cmr = sip.common_reference(oe_si_recording_filtered, reference="global", operator="median") - oe_recording_cmr.save_to_folder('oe_recording_cmr', kilosort_dir) + # oe_recording_cmr.save_to_folder('oe_recording_cmr', kilosort_dir) + oe_recording_cmr.dump_to_json('oe_recording_cmr.json', kilosort_dir) self.insert1( { 
@@ -217,7 +220,7 @@ class SI_KilosortClustering(dj.Imported): """A processing table to handle each clustering task.""" definition = """ - -> KilosortPreProcessing + -> SI_PreProcessing --- execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration @@ -236,16 +239,18 @@ def make(self, key): params = (KilosortPreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": - sglx_probe = ephys.get_openephys_probe_data(key) - oe_si_recording = se.load_from_folder - assert len(oe_probe.recording_info["recording_files"]) == 1 + # sglx_probe = ephys.get_openephys_probe_data(key) + recording_file = kilosort_dir / 'sglx_recording_cmr.json' + # sglx_si_recording = se.load_from_folder(recording_file) + sglx_si_recording = sic.load_extractor(recording_file) + # assert len(oe_probe.recording_info["recording_files"]) == 1 if clustering_method.startswith('kilosort2.5'): sorter_name = "kilosort2_5" else: sorter_name = clustering_method sorting_kilosort = si.run_sorter( sorter_name = sorter_name, - recording = oe_si_recording, + recording = sglx_si_recording, output_folder = kilosort_dir, docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", **params @@ -253,7 +258,7 @@ def make(self, key): sorting_kilosort.save_to_folder('sorting_kilosort', kilosort_dir) elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) - oe_si_recording = se.load_from_folder + oe_si_recording = se.load_from_folder assert len(oe_probe.recording_info["recording_files"]) == 1 if clustering_method.startswith('kilosort2.5'): sorter_name = "kilosort2_5" @@ -266,7 +271,8 @@ def make(self, key): docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", **params ) - sorting_kilosort.save_to_folder('sorting_kilosort', kilosort_dir) + sorting_kilosort.save_to_folder('sorting_kilosort', kilosort_dir, n_jobs=-1, chunk_size=30000) + # sorting_kilosort.save(folder=kilosort_dir, n_jobs=20, chunk_size=30000) 
self.insert1( { @@ -279,7 +285,100 @@ def make(self, key): } ) +@schema +class SI_KilosortPostProcessing(dj.Imported): + """A processing table to handle each clustering task.""" + + definition = """ + -> SI_KilosortClustering + --- + modules_status: longblob # dictionary of summary status for all modules + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration + """ + + def make(self, key): + execution_time = datetime.utcnow() + + output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") + kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + acq_software, clustering_method = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method") + + params = (KilosortPreProcessing & key).fetch1("params") + + if acq_software == "SpikeGLX": + sorting_file = kilosort_dir / 'sorting_kilosort' + recording_file = kilosort_dir / 'sglx_recording_cmr.json' + sglx_si_recording = sic.load_extractor(recording_file) + sorting_kilosort = sic.load_extractor(sorting_file) + + we_kilosort = si.WaveformExtractor.create(sglx_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True) + we_kilosort.run_extract_waveforms(n_jobs=-1, chunk_size=30000) + unit_id0 = sorting_kilosort.unit_ids[0] + waveforms = we_kilosort.get_waveforms(unit_id0) + template = we_kilosort.get_template(unit_id0) + snrs = si.compute_snrs(we_kilosort) + + + # QC Metrics + si_violations_ratio, isi_violations_rate, isi_violations_count = si.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) + metrics = si.compute_quality_metrics(we_kilosort, metric_names=["firing_rate","snr","presence_ratio","isi_violation", + "num_spikes","amplitude_cutoff","amplitude_median","sliding_rp_violation","rp_violation","drift"]) + sie.export_report(we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000) + # ["firing_rate","snr","presence_ratio","isi_violation", + 
# "number_violation","amplitude_cutoff","isolation_distance","l_ratio","d_prime","nn_hit_rate", + # "nn_miss_rate","silhouette_core","cumulative_drift","contamination_rate"]) + + we_kilosort.save_to_folder('we_kilosort',kilosort_dir, n_jobs=-1, chunk_size=30000) + + + + elif acq_software == "Open Ephys": + sorting_file = kilosort_dir / 'sorting_kilosort' + recording_file = kilosort_dir / 'sglx_recording_cmr.json' + sglx_si_recording = sic.load_extractor(recording_file) + sorting_kilosort = sic.load_extractor(sorting_file) + + we_kilosort = si.WaveformExtractor.create(sglx_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True) + we_kilosort.run_extract_waveforms(n_jobs=-1, chunk_size=30000) + unit_id0 = sorting_kilosort.unit_ids[0] + waveforms = we_kilosort.get_waveforms(unit_id0) + template = we_kilosort.get_template(unit_id0) + snrs = si.compute_snrs(we_kilosort) + + + # QC Metrics + si_violations_ratio, isi_violations_rate, isi_violations_count = si.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) + metrics = si.compute_quality_metrics(we_kilosort, metric_names=["firing_rate","snr","presence_ratio","isi_violation", + "num_spikes","amplitude_cutoff","amplitude_median","sliding_rp_violation","rp_violation","drift"]) + sie.export_report(we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000) + + we_kilosort.save_to_folder('we_kilosort',kilosort_dir, n_jobs=-1, chunk_size=30000) + + + + with open(run_kilosort._modules_input_hash_fp) as f: + modules_status = json.load(f) + self.insert1( + { + **key, + "modules_status": modules_status, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) + + # all finished, insert this `key` into ephys.Clustering + ephys.Clustering.insert1( + {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True + ) From bca5fa9593e7736548d253daae6ec0452bfec94e Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 
2023 01:47:15 -0600 Subject: [PATCH 018/204] edit typos --- .../spike_sorting/si_clustering.py | 261 +----------------- 1 file changed, 4 insertions(+), 257 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 9ddddb75..b3391f93 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -236,7 +236,8 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - params = (KilosortPreProcessing & key).fetch1("params") + params = (SI_PreProcessing & key).fetch1("params") + if acq_software == "SpikeGLX": # sglx_probe = ephys.get_openephys_probe_data(key) @@ -286,7 +287,7 @@ def make(self, key): ) @schema -class SI_KilosortPostProcessing(dj.Imported): +class SI_PostProcessing(dj.Imported): """A processing table to handle each clustering task.""" definition = """ @@ -307,7 +308,7 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - params = (KilosortPreProcessing & key).fetch1("params") + params = (SI_PreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": sorting_file = kilosort_dir / 'sorting_kilosort' @@ -335,7 +336,6 @@ def make(self, key): we_kilosort.save_to_folder('we_kilosort',kilosort_dir, n_jobs=-1, chunk_size=30000) - elif acq_software == "Open Ephys": sorting_file = kilosort_dir / 'sorting_kilosort' recording_file = kilosort_dir / 'sglx_recording_cmr.json' @@ -358,8 +358,6 @@ def make(self, key): we_kilosort.save_to_folder('we_kilosort',kilosort_dir, n_jobs=-1, chunk_size=30000) - - with open(run_kilosort._modules_input_hash_fp) as f: modules_status = json.load(f) @@ -380,254 +378,3 @@ def make(self, key): {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True ) - - -@schema -class 
KilosortPreProcessing(dj.Imported): - """A processing table to handle each clustering task.""" - - definition = """ - -> ephys.ClusteringTask - --- - params: longblob # finalized parameterset for this run - execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration - """ - - @property - def key_source(self): - return ( - ephys.ClusteringTask * ephys.ClusteringParamSet - & {"task_mode": "trigger"} - & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' - ) - ephys.Clustering - - def make(self, key): - """Triggers or imports clustering analysis.""" - execution_time = datetime.utcnow() - - task_mode, output_dir = (ephys.ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - - assert task_mode == "trigger", 'Supporting "trigger" task_mode only' - - if not output_dir: - output_dir = ephys.ClusteringTask.infer_output_dir( - key, relative=True, mkdir=True - ) - # update clustering_output_dir - ephys.ClusteringTask.update1( - {**key, "clustering_output_dir": output_dir.as_posix()} - ) - - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method, params = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method", "params") - - assert ( - clustering_method in _supported_kilosort_versions - ), f'Clustering_method "{clustering_method}" is not supported' - - # add additional probe-recording and channels details into `params` - params = {**params, **ephys.get_recording_channels_details(key)} - params["fs"] = params["sample_rate"] - - - - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - spikeglx_recording.validate_file("ap") - run_CatGT = ( - params.get("run_CatGT", True) - and "_tcat." 
not in spikeglx_meta_filepath.stem - ) - - run_kilosort = kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=run_CatGT, - ) - run_kilosort.run_CatGT() - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort._modules = ["depth_estimation", "median_subtraction"] - run_kilosort.run_modules() - - self.insert1( - { - **key, - "params": params, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } - ) - - -@schema -class KilosortClustering(dj.Imported): - """A processing table to handle each clustering task.""" - - definition = """ - -> KilosortPreProcessing - --- - execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration - """ - - def make(self, key): - execution_time = datetime.utcnow() - - output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method") - - params = (KilosortPreProcessing & key).fetch1("params") - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - spikeglx_recording.validate_file("ap") - - run_kilosort = 
kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=True, - ) - run_kilosort._modules = ["kilosort_helper"] - run_kilosort._CatGT_finished = True - run_kilosort.run_modules() - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort._modules = ["kilosort_helper"] - run_kilosort.run_modules() - - self.insert1( - { - **key, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } - ) - - -@schema -class KilosortPostProcessing(dj.Imported): - """A processing table to handle each clustering task.""" - - definition = """ - -> KilosortClustering - --- - modules_status: longblob # dictionary of summary status for all modules - execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration - """ - - def make(self, key): - execution_time = datetime.utcnow() - - output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method") - - params = (KilosortPreProcessing & key).fetch1("params") - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - 
spikeglx_recording.validate_file("ap") - - run_kilosort = kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=True, - ) - run_kilosort._modules = [ - "kilosort_postprocessing", - "noise_templates", - "mean_waveforms", - "quality_metrics", - ] - run_kilosort._CatGT_finished = True - run_kilosort.run_modules() - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort._modules = [ - "kilosort_postprocessing", - "noise_templates", - "mean_waveforms", - "quality_metrics", - ] - run_kilosort.run_modules() - - with open(run_kilosort._modules_input_hash_fp) as f: - modules_status = json.load(f) - - self.insert1( - { - **key, - "modules_status": modules_status, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } - ) - - # all finished, insert this `key` into ephys.Clustering - ephys.Clustering.insert1( - {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True - ) From 1c4b0b578f31b2779a65db8ef67a8738a6352ff1 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 2023 14:49:47 -0600 Subject: [PATCH 019/204] removed module_status from table keys --- element_array_ephys/spike_sorting/si_clustering.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index b3391f93..d3c3bbf9 100644 --- 
a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -293,7 +293,6 @@ class SI_PostProcessing(dj.Imported): definition = """ -> SI_KilosortClustering --- - modules_status: longblob # dictionary of summary status for all modules execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration """ @@ -358,13 +357,10 @@ def make(self, key): we_kilosort.save_to_folder('we_kilosort',kilosort_dir, n_jobs=-1, chunk_size=30000) - with open(run_kilosort._modules_input_hash_fp) as f: - modules_status = json.load(f) self.insert1( { **key, - "modules_status": modules_status, "execution_time": execution_time, "execution_duration": ( datetime.utcnow() - execution_time From 56c9941f12072b3d572520f69ee999025117ffb5 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 2023 17:16:47 -0600 Subject: [PATCH 020/204] remove _ from SI table names --- .../spike_sorting/si_clustering.py | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index d3c3bbf9..a72989ed 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -8,8 +8,8 @@ https://github.com/datajoint-company/ecephys_spike_sorting The follow pipeline features intermediary tables: -1. KilosortPreProcessing - for preprocessing steps (no GPU required) - - median_subtraction for Open Ephys +1. SIPreProcessing - for preprocessing steps (no GPU required) + - - or the CatGT step for SpikeGLX 2. 
KilosortClustering - kilosort (MATLAB) - requires GPU - supports kilosort 2.0, 2.5 or 3.0 (https://github.com/MouseLand/Kilosort.git) @@ -90,7 +90,7 @@ def activate( ) @schema -class SI_PreProcessing(dj.Imported): +class SIPreProcessing(dj.Imported): """A table to handle preprocessing of each clustering task.""" definition = """ @@ -168,8 +168,8 @@ def make(self, key): # run preprocessing and save results to output folder sglx_si_recording_filtered = sip.bandpass_filter(sglx_si_recording, freq_min=300, freq_max=6000) - sglx_recording_cmr = sip.common_reference(sglx_si_recording_filtered, reference="global", operator="median") - sglx_recording_cmr.save_to_folder('sglx_recording_cmr', kilosort_dir) + # sglx_recording_cmr = sip.common_reference(sglx_si_recording_filtered, reference="global", operator="median") + sglx_si_recording_filtered.save_to_folder('sglx_si_recording_filtered', kilosort_dir) elif acq_software == "Open Ephys": @@ -216,7 +216,7 @@ def make(self, key): } ) @schema -class SI_KilosortClustering(dj.Imported): +class SIClustering(dj.Imported): """A processing table to handle each clustering task.""" definition = """ @@ -236,8 +236,7 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - params = (SI_PreProcessing & key).fetch1("params") - + params = (SIPreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": # sglx_probe = ephys.get_openephys_probe_data(key) @@ -287,11 +286,11 @@ def make(self, key): ) @schema -class SI_PostProcessing(dj.Imported): +class SIPostProcessing(dj.Imported): """A processing table to handle each clustering task.""" definition = """ - -> SI_KilosortClustering + -> SIClustering --- execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration @@ -307,7 +306,7 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key 
).fetch1("acq_software", "clustering_method") - params = (SI_PreProcessing & key).fetch1("params") + params = (SIPreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": sorting_file = kilosort_dir / 'sorting_kilosort' From ce14098041a5292ec8dd9abd9776074d975ad3b2 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 2023 17:23:56 -0600 Subject: [PATCH 021/204] bugfix --- element_array_ephys/spike_sorting/si_clustering.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index a72989ed..97a7dd53 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -202,7 +202,8 @@ def make(self, key): oe_si_recording_filtered = sip.bandpass_filter(oe_si_recording, freq_min=300, freq_max=6000) oe_recording_cmr = sip.common_reference(oe_si_recording_filtered, reference="global", operator="median") # oe_recording_cmr.save_to_folder('oe_recording_cmr', kilosort_dir) - oe_recording_cmr.dump_to_json('oe_recording_cmr.json', kilosort_dir) + # oe_recording_cmr.dump_to_json('oe_recording_cmr.json', kilosort_dir) + oe_si_recording_filtered.save_to_folder('', kilosort_dir) self.insert1( { @@ -220,7 +221,7 @@ class SIClustering(dj.Imported): """A processing table to handle each clustering task.""" definition = """ - -> SI_PreProcessing + -> SIPreProcessing --- execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration From 7c836f12fd1d47e5b7cf15435eaa19ad1faa7ae4 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 2023 17:33:52 -0600 Subject: [PATCH 022/204] change si related table names --- element_array_ephys/spike_sorting/si_clustering.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py 
b/element_array_ephys/spike_sorting/si_clustering.py index 97a7dd53..d97066a6 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -90,7 +90,7 @@ def activate( ) @schema -class SIPreProcessing(dj.Imported): +class PreProcessing(dj.Imported): """A table to handle preprocessing of each clustering task.""" definition = """ @@ -164,7 +164,7 @@ def make(self, key): probe.create_auto_shape(probe_type='tip') channel_indices = np.arange(channels_details['num_channels']) probe.set_device_channel_indices(channel_indices) - oe_si_recording.set_probe(probe=probe) + sglx_si_recording.set_probe(probe=probe) # run preprocessing and save results to output folder sglx_si_recording_filtered = sip.bandpass_filter(sglx_si_recording, freq_min=300, freq_max=6000) @@ -217,7 +217,7 @@ def make(self, key): } ) @schema -class SIClustering(dj.Imported): +class ClusteringModule(dj.Imported): """A processing table to handle each clustering task.""" definition = """ @@ -237,7 +237,7 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - params = (SIPreProcessing & key).fetch1("params") + params = (PreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": # sglx_probe = ephys.get_openephys_probe_data(key) @@ -287,11 +287,11 @@ def make(self, key): ) @schema -class SIPostProcessing(dj.Imported): +class PostProcessing(dj.Imported): """A processing table to handle each clustering task.""" definition = """ - -> SIClustering + -> ClusteringModule --- execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration @@ -307,7 +307,7 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - params = (SIPreProcessing & key).fetch1("params") + params = (PreProcessing & key).fetch1("params") if 
acq_software == "SpikeGLX": sorting_file = kilosort_dir / 'sorting_kilosort' From dd6366498d1a4bd974803b89abdfd7ab30a96623 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 2023 17:38:49 -0600 Subject: [PATCH 023/204] bugfix --- element_array_ephys/spike_sorting/si_clustering.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index d97066a6..cabe5c25 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -221,7 +221,7 @@ class ClusteringModule(dj.Imported): """A processing table to handle each clustering task.""" definition = """ - -> SIPreProcessing + -> PreProcessing --- execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration From 54888ed7c747bf5452e90d590641824bd70684f1 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 3 Feb 2023 18:02:46 -0600 Subject: [PATCH 024/204] update initial comment --- .../spike_sorting/si_clustering.py | 26 ++++++++----------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index cabe5c25..e69855f1 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -1,22 +1,20 @@ """ The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the -"ecephys_spike_sorting" pipeline. -The "ecephys_spike_sorting" was originally developed by the Allen Institute (https://github.com/AllenInstitute/ecephys_spike_sorting) for Neuropixels data acquired with Open Ephys acquisition system. -Then forked by Jennifer Colonell from the Janelia Research Campus (https://github.com/jenniferColonell/ecephys_spike_sorting) to support SpikeGLX acquisition system. 
+"spikeinterface" pipeline. +Spikeinterface developed by Alessio Buccino, Samuel Garcia, Cole Hurwitz, Jeremy Magland, and Matthias Hennig (https://github.com/SpikeInterface) -At DataJoint, we fork from Jennifer's fork and implemented a version that supports both Open Ephys and Spike GLX. -https://github.com/datajoint-company/ecephys_spike_sorting +The DataJoint pipeline currently incorporated Spikeinterfaces approach of running Kilosort using a container The follow pipeline features intermediary tables: -1. SIPreProcessing - for preprocessing steps (no GPU required) - - - - or the CatGT step for SpikeGLX -2. KilosortClustering - kilosort (MATLAB) - requires GPU +1. PreProcessing - for preprocessing steps (no GPU required) + - create recording extractor and link it to a probe + - bandpass filtering + - common mode referencing +2. ClusteringModule - kilosort (MATLAB) - requires GPU and docker/singularity containers - supports kilosort 2.0, 2.5 or 3.0 (https://github.com/MouseLand/Kilosort.git) -3. KilosortPostProcessing - for postprocessing steps (no GPU required) - - kilosort_postprocessing - - noise_templates - - mean_waveforms +3. 
PostProcessing - for postprocessing steps (no GPU required) + - create waveform extractor object + - extract templates, waveforms and snrs - quality_metrics @@ -48,8 +46,6 @@ import spikeinterface.extractors as se import spikeinterface.exporters as sie import spikeinterface.sorters as ss -import spikeinterface.comparison as sc -import spikeinterface.widgets as sw import spikeinterface.preprocessing as sip import probeinterface as pi From f804233b63a74ad935b953c3051ad74de0a4f4b2 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Wed, 8 Feb 2023 19:17:51 -0600 Subject: [PATCH 025/204] fix preprocessing file loading issues --- .../spike_sorting/si_clustering.py | 64 ++++++++----------- 1 file changed, 27 insertions(+), 37 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index e69855f1..e6e129bc 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -32,15 +32,8 @@ spikeglx, kilosort_triggering, ) -import element_array_ephys.ephys_no_curation as ephys import element_array_ephys.probe as probe -# from element_array_ephys.ephys_no_curation import ( -# get_ephys_root_data_dir, -# get_session_directory, -# get_openephys_filepath, -# get_spikeglx_meta_filepath, -# get_recording_channels_details, -# ) + import spikeinterface as si import spikeinterface.core as sic import spikeinterface.extractors as se @@ -92,6 +85,7 @@ class PreProcessing(dj.Imported): definition = """ -> ephys.ClusteringTask --- + file_name: varchar(60) # filename where recording object is saved to params: longblob # finalized parameterset for this run execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration @@ -137,30 +131,27 @@ def make(self, key): params = {**params, **ephys.get_recording_channels_details(key)} params["fs"] = params["sample_rate"] + if acq_software == "SpikeGLX": 
sglx_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) sglx_filepath = ephys.get_spikeglx_meta_filepath(key) stream_name = os.path.split(sglx_filepath)[1] - assert len(oe_probe.recording_info["recording_files"]) == 1 + # assert len(oe_probe.recording_info["recording_files"]) == 1 # Create SI recording extractor object # sglx_si_recording = se.SpikeGLXRecordingExtractor(folder_path=sglx_full_path, stream_name=stream_name) sglx_si_recording = se.read_spikeglx(folder_path=sglx_full_path, stream_name=stream_name) - electrode_query = (probe.ProbeType.Electrode - * probe.ElectrodeConfig.Electrode - * ephys.EphysRecording & key) - xy_coords = [list(i) for i in zip(electrode_query.fetch('x_coord'),electrode_query.fetch('y_coord'))] + xy_coords = [list(i) for i in zip(channels_details['x_coords'],channels_details['y_coords'])] channels_details = ephys.get_recording_channels_details(key) # Create SI probe object - probe = pi.Probe(ndim=2, si_units='um') - probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) - probe.create_auto_shape(probe_type='tip') - channel_indices = np.arange(channels_details['num_channels']) - probe.set_device_channel_indices(channel_indices) - sglx_si_recording.set_probe(probe=probe) + si_probe = pi.Probe(ndim=2, si_units='um') + si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) + si_probe.create_auto_shape(probe_type='tip') + si_probe.set_device_channel_indices(channels_details['channel_ind']) + sglx_si_recording.set_probe(probe=si_probe) # run preprocessing and save results to output folder sglx_si_recording_filtered = sip.bandpass_filter(sglx_si_recording, freq_min=300, freq_max=6000) @@ -170,29 +161,25 @@ def make(self, key): elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) - oe_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) - oe_filepath = 
ephys.get_openephys_filepath(key) - stream_name = os.path.split(oe_filepath)[1] - + oe_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) + assert len(oe_probe.recording_info["recording_files"]) == 1 + stream_name = os.path.split(oe_probe.recording_info['recording_files'][0])[1] # Create SI recording extractor object # oe_si_recording = se.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) - oe_si_recording = se.read_openephys(folder_path=oe_full_path, stream_name=stream_name) - electrode_query = (probe.ProbeType.Electrode - * probe.ElectrodeConfig.Electrode - * ephys.EphysRecording & key) + oe_si_recording = se.read_openephys(folder_path=oe_session_full_path, stream_name=stream_name) - xy_coords = [list(i) for i in zip(electrode_query.fetch('x_coord'),electrode_query.fetch('y_coord'))] + xy_coords = [list(i) for i in zip(channels_details['x_coords'],channels_details['y_coords'])] + channels_details = ephys.get_recording_channels_details(key) # Create SI probe object - probe = pi.Probe(ndim=2, si_units='um') - probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) - probe.create_auto_shape(probe_type='tip') - channel_indices = np.arange(channels_details['num_channels']) - probe.set_device_channel_indices(channel_indices) - oe_si_recording.set_probe(probe=probe) + si_probe = pi.Probe(ndim=2, si_units='um') + si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) + si_probe.create_auto_shape(probe_type='tip') + si_probe.set_device_channel_indices(channels_details['channel_ind']) + oe_si_recording.set_probe(probe=si_probe) # run preprocessing and save results to output folder oe_si_recording_filtered = sip.bandpass_filter(oe_si_recording, freq_min=300, freq_max=6000) @@ -219,8 +206,10 @@ class ClusteringModule(dj.Imported): definition = """ -> PreProcessing --- - execution_time: datetime # datetime of the start of this step - 
execution_duration: float # (hour) execution duration + recording_file: varchar(60) # filename of saved recording object + sorting_file: varchar(60) # filename of saved sorting object + execution_time: datetime # datetime of the start of this step + execution_duration: float # (hour) execution duration """ def make(self, key): @@ -234,10 +223,11 @@ def make(self, key): ).fetch1("acq_software", "clustering_method") params = (PreProcessing & key).fetch1("params") + file_name = (PreProcessing & key).fetch1("file_name") if acq_software == "SpikeGLX": # sglx_probe = ephys.get_openephys_probe_data(key) - recording_file = kilosort_dir / 'sglx_recording_cmr.json' + recording_file = kilosort_dir / file_name # sglx_si_recording = se.load_from_folder(recording_file) sglx_si_recording = sic.load_extractor(recording_file) # assert len(oe_probe.recording_info["recording_files"]) == 1 From 8e1b73dd5013b9eb6da542678726fea800a3dcf6 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Thu, 9 Feb 2023 15:58:12 -0600 Subject: [PATCH 026/204] set file saving and file loading to pickle format --- element_array_ephys/spike_sorting/si_clustering.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index e6e129bc..cf909725 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -186,11 +186,14 @@ def make(self, key): oe_recording_cmr = sip.common_reference(oe_si_recording_filtered, reference="global", operator="median") # oe_recording_cmr.save_to_folder('oe_recording_cmr', kilosort_dir) # oe_recording_cmr.dump_to_json('oe_recording_cmr.json', kilosort_dir) - oe_si_recording_filtered.save_to_folder('', kilosort_dir) + save_file_name = 'si_recording.pkl' + save_file_path = kilosort_dir / save_file_name + oe_si_recording_filtered.dump_to_pickle(file_path=save_file_path) self.insert1( { **key, + 
"file_name": save_file_name, "params": params, "execution_time": execution_time, "execution_duration": ( From f0b7497e7b173396a35efbd9a1545981a095d96c Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 10 Feb 2023 18:09:40 -0600 Subject: [PATCH 027/204] sglx preprocessing modifications --- .../spike_sorting/si_clustering.py | 58 ++++++++++--------- 1 file changed, 31 insertions(+), 27 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index cf909725..6ec4b6e2 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -34,7 +34,7 @@ ) import element_array_ephys.probe as probe -import spikeinterface as si +import spikeinterface.full as si import spikeinterface.core as sic import spikeinterface.extractors as se import spikeinterface.exporters as sie @@ -85,7 +85,7 @@ class PreProcessing(dj.Imported): definition = """ -> ephys.ClusteringTask --- - file_name: varchar(60) # filename where recording object is saved to + recording_filename: varchar(60) # filename where recording object is saved to params: longblob # finalized parameterset for this run execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration @@ -133,22 +133,19 @@ def make(self, key): if acq_software == "SpikeGLX": - sglx_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) + # sglx_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) sglx_filepath = ephys.get_spikeglx_meta_filepath(key) - stream_name = os.path.split(sglx_filepath)[1] - - # assert len(oe_probe.recording_info["recording_files"]) == 1 # Create SI recording extractor object - # sglx_si_recording = se.SpikeGLXRecordingExtractor(folder_path=sglx_full_path, stream_name=stream_name) - sglx_si_recording = se.read_spikeglx(folder_path=sglx_full_path, 
stream_name=stream_name) - - xy_coords = [list(i) for i in zip(channels_details['x_coords'],channels_details['y_coords'])] + sglx_si_recording = se.read_spikeglx(folder_path=sglx_filepath.parent) + channels_details = ephys.get_recording_channels_details(key) + xy_coords = [list(i) for i in zip(channels_details['x_coords'],channels_details['y_coords'])] + # Create SI probe object si_probe = pi.Probe(ndim=2, si_units='um') - si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) + si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 12}) si_probe.create_auto_shape(probe_type='tip') si_probe.set_device_channel_indices(channels_details['channel_ind']) sglx_si_recording.set_probe(probe=si_probe) @@ -156,7 +153,10 @@ def make(self, key): # run preprocessing and save results to output folder sglx_si_recording_filtered = sip.bandpass_filter(sglx_si_recording, freq_min=300, freq_max=6000) # sglx_recording_cmr = sip.common_reference(sglx_si_recording_filtered, reference="global", operator="median") - sglx_si_recording_filtered.save_to_folder('sglx_si_recording_filtered', kilosort_dir) + + save_file_name = 'si_recording.pkl' + save_file_path = kilosort_dir / save_file_name + sglx_si_recording_filtered.dump_to_pickle(file_path=save_file_path) elif acq_software == "Open Ephys": @@ -170,22 +170,21 @@ def make(self, key): # oe_si_recording = se.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) oe_si_recording = se.read_openephys(folder_path=oe_session_full_path, stream_name=stream_name) + channels_details = ephys.get_recording_channels_details(key) xy_coords = [list(i) for i in zip(channels_details['x_coords'],channels_details['y_coords'])] - channels_details = ephys.get_recording_channels_details(key) - # Create SI probe object si_probe = pi.Probe(ndim=2, si_units='um') - si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 5}) + 
si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 12}) si_probe.create_auto_shape(probe_type='tip') si_probe.set_device_channel_indices(channels_details['channel_ind']) oe_si_recording.set_probe(probe=si_probe) # run preprocessing and save results to output folder + # Switch case to allow for specified preprocessing steps oe_si_recording_filtered = sip.bandpass_filter(oe_si_recording, freq_min=300, freq_max=6000) oe_recording_cmr = sip.common_reference(oe_si_recording_filtered, reference="global", operator="median") - # oe_recording_cmr.save_to_folder('oe_recording_cmr', kilosort_dir) - # oe_recording_cmr.dump_to_json('oe_recording_cmr.json', kilosort_dir) + save_file_name = 'si_recording.pkl' save_file_path = kilosort_dir / save_file_name oe_si_recording_filtered.dump_to_pickle(file_path=save_file_path) @@ -193,7 +192,7 @@ def make(self, key): self.insert1( { **key, - "file_name": save_file_name, + "recording_filename": save_file_name, "params": params, "execution_time": execution_time, "execution_duration": ( @@ -202,15 +201,14 @@ def make(self, key): / 3600, } ) -@schema + @schema class ClusteringModule(dj.Imported): """A processing table to handle each clustering task.""" definition = """ -> PreProcessing --- - recording_file: varchar(60) # filename of saved recording object - sorting_file: varchar(60) # filename of saved sorting object + sorting_filename: varchar(60) # filename of saved sorting object execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration """ @@ -226,13 +224,13 @@ def make(self, key): ).fetch1("acq_software", "clustering_method") params = (PreProcessing & key).fetch1("params") - file_name = (PreProcessing & key).fetch1("file_name") + recording_filename = (PreProcessing & key).fetch1("recording_filename") if acq_software == "SpikeGLX": # sglx_probe = ephys.get_openephys_probe_data(key) - recording_file = kilosort_dir / file_name + recording_fullpath = 
kilosort_dir / recording_filename # sglx_si_recording = se.load_from_folder(recording_file) - sglx_si_recording = sic.load_extractor(recording_file) + sglx_si_recording = sic.load_extractor(recording_fullpath) # assert len(oe_probe.recording_info["recording_files"]) == 1 if clustering_method.startswith('kilosort2.5'): sorter_name = "kilosort2_5" @@ -245,10 +243,11 @@ def make(self, key): docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", **params ) - sorting_kilosort.save_to_folder('sorting_kilosort', kilosort_dir) + sorting_save_path = kilosort_dir / 'sorting_kilosort.pkl' + sorting_kilosort.dump_to_pickle(sorting_save_path) elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) - oe_si_recording = se.load_from_folder + oe_si_recording = sic.load_extractor(recording_fullpath) assert len(oe_probe.recording_info["recording_files"]) == 1 if clustering_method.startswith('kilosort2.5'): sorter_name = "kilosort2_5" @@ -261,7 +260,8 @@ def make(self, key): docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", **params ) - sorting_kilosort.save_to_folder('sorting_kilosort', kilosort_dir, n_jobs=-1, chunk_size=30000) + sorting_save_path = kilosort_dir / 'sorting_kilosort.pkl' + sorting_kilosort.dump_to_pickle(sorting_save_path) # sorting_kilosort.save(folder=kilosort_dir, n_jobs=20, chunk_size=30000) self.insert1( @@ -363,3 +363,7 @@ def make(self, key): {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True ) + + +def preProcessing_switch(preprocess_list): + \ No newline at end of file From 13fe31c49fa5e5286c4ef916605bf017a33a9d87 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Mon, 13 Feb 2023 17:48:12 -0600 Subject: [PATCH 028/204] sglx testing progress --- element_array_ephys/spike_sorting/si_clustering.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 
6ec4b6e2..08ff86bd 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -128,8 +128,8 @@ def make(self, key): ), f'Clustering_method "{clustering_method}" is not supported' # add additional probe-recording and channels details into `params` - params = {**params, **ephys.get_recording_channels_details(key)} - params["fs"] = params["sample_rate"] + # params = {**params, **ephys.get_recording_channels_details(key)} + # params["fs"] = params["sample_rate"] if acq_software == "SpikeGLX": From d41c7f3c6d0fb2b0c97578d6c8cd28ab1b6a2832 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Tue, 14 Feb 2023 18:46:54 -0600 Subject: [PATCH 029/204] wip parametrize preprocessing --- .../spike_sorting/si_clustering.py | 117 +++++++++++++++--- 1 file changed, 99 insertions(+), 18 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 08ff86bd..f0144ea6 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -131,6 +131,46 @@ def make(self, key): # params = {**params, **ephys.get_recording_channels_details(key)} # params["fs"] = params["sample_rate"] + + preprocess_list = params.pop('PreProcessing_params') + + # If else + if preprocess_list['Filter']: + oe_si_recording = sip.FilterRecording(oe_si_recording) + elif preprocess_list['BandpassFilter']: + oe_si_recording = sip.BandpassFilterRecording(oe_si_recording) + elif preprocess_list['HighpassFilter']: + oe_si_recording = sip.HighpassFilterRecording(oe_si_recording) + elif preprocess_list['NormalizeByQuantile']: + oe_si_recording = sip.NormalizeByQuantileRecording(oe_si_recording) + elif preprocess_list['Scale']: + oe_si_recording = sip.ScaleRecording(oe_si_recording) + elif preprocess_list['Center']: + oe_si_recording = sip.CenterRecording(oe_si_recording) + elif preprocess_list['ZScore']: + oe_si_recording = 
sip.ZScoreRecording(oe_si_recording) + elif preprocess_list['Whiten']: + oe_si_recording = sip.WhitenRecording(oe_si_recording) + elif preprocess_list['CommonReference']: + oe_si_recording = sip.CommonReferenceRecording(oe_si_recording) + elif preprocess_list['PhaseShift']: + oe_si_recording = sip.PhaseShiftRecording(oe_si_recording) + elif preprocess_list['Rectify']: + oe_si_recording = sip.RectifyRecording(oe_si_recording) + elif preprocess_list['Clip']: + oe_si_recording = sip.ClipRecording(oe_si_recording) + elif preprocess_list['BlankSaturation']: + oe_si_recording = sip.BlankSaturationRecording(oe_si_recording) + elif preprocess_list['RemoveArtifacts']: + oe_si_recording = sip.RemoveArtifactsRecording(oe_si_recording) + elif preprocess_list['RemoveBadChannels']: + oe_si_recording = sip.RemoveBadChannelsRecording(oe_si_recording) + elif preprocess_list['ZeroChannelPad']: + oe_si_recording = sip.ZeroChannelPadRecording(oe_si_recording) + elif preprocess_list['DeepInterpolation']: + oe_si_recording = sip.DeepInterpolationRecording(oe_si_recording) + elif preprocess_list['Resample']: + oe_si_recording = sip.ResampleRecording(oe_si_recording) if acq_software == "SpikeGLX": # sglx_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) @@ -232,17 +272,23 @@ def make(self, key): # sglx_si_recording = se.load_from_folder(recording_file) sglx_si_recording = sic.load_extractor(recording_fullpath) # assert len(oe_probe.recording_info["recording_files"]) == 1 + + ## Assume that the worker process will trigger this sorting step + # - Will need to store/load the sorter_name, sglx_si_recording object etc. 
+ # - Store in shared EC2 space accessible by all containers (needs to be mounted) + # - Load into the cloud init script, and + # - Option A: Can call this function within a separate container within spike_sorting_worker if clustering_method.startswith('kilosort2.5'): sorter_name = "kilosort2_5" else: sorter_name = clustering_method - sorting_kilosort = si.run_sorter( - sorter_name = sorter_name, - recording = sglx_si_recording, - output_folder = kilosort_dir, - docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", - **params - ) + # sorting_kilosort = si.run_sorter( + # sorter_name = sorter_name, + # recording = sglx_si_recording, + # output_folder = kilosort_dir, + # docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", + # **params + # ) sorting_save_path = kilosort_dir / 'sorting_kilosort.pkl' sorting_kilosort.dump_to_pickle(sorting_save_path) elif acq_software == "Open Ephys": @@ -253,13 +299,13 @@ def make(self, key): sorter_name = "kilosort2_5" else: sorter_name = clustering_method - sorting_kilosort = si.run_sorter( - sorter_name = sorter_name, - recording = oe_si_recording, - output_folder = kilosort_dir, - docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", - **params - ) + # sorting_kilosort = si.run_sorter( + # sorter_name = sorter_name, + # recording = oe_si_recording, + # output_folder = kilosort_dir, + # docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", + # **params + # ) sorting_save_path = kilosort_dir / 'sorting_kilosort.pkl' sorting_kilosort.dump_to_pickle(sorting_save_path) # sorting_kilosort.save(folder=kilosort_dir, n_jobs=20, chunk_size=30000) @@ -363,7 +409,42 @@ def make(self, key): {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True ) - - -def preProcessing_switch(preprocess_list): - \ No newline at end of file +## Example SI parameter set +''' +{'detect_threshold': 6, + 'projection_threshold': [10, 4], + 'preclust_threshold': 8, + 'car': True, + 'minFR': 
0.02, + 'minfr_goodchannels': 0.1, + 'nblocks': 5, + 'sig': 20, + 'freq_min': 150, + 'sigmaMask': 30, + 'nPCs': 3, + 'ntbuff': 64, + 'nfilt_factor': 4, + 'NT': None, + 'do_correction': True, + 'wave_length': 61, + 'keep_good_only': False, + 'PreProcessing_params': {'Filter': False, + 'BandpassFilter': True, + 'HighpassFilter': False, + 'NotchFilter': False, + 'NormalizeByQuantile': False, + 'Scale': False, + 'Center': False, + 'ZScore': False, + 'Whiten': False, + 'CommonReference': False, + 'PhaseShift': False, + 'Rectify': False, + 'Clip': False, + 'BlankSaturation': False, + 'RemoveArtifacts': False, + 'RemoveBadChannels': False, + 'ZeroChannelPad': False, + 'DeepInterpolation': False, + 'Resample': False}} +''' \ No newline at end of file From 109a71ad4e8c884659bee146dda2afdbfe4fa77a Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 17 Feb 2023 19:34:53 -0600 Subject: [PATCH 030/204] post processing waveform extractor extensions --- .../spike_sorting/si_clustering.py | 282 +++++++++++------- 1 file changed, 178 insertions(+), 104 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index f0144ea6..13f129d8 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -34,6 +34,7 @@ ) import element_array_ephys.probe as probe +import spikeinterface import spikeinterface.full as si import spikeinterface.core as sic import spikeinterface.extractors as se @@ -78,6 +79,7 @@ def activate( add_objects=ephys.__dict__, ) + @schema class PreProcessing(dj.Imported): """A table to handle preprocessing of each clustering task.""" @@ -85,7 +87,7 @@ class PreProcessing(dj.Imported): definition = """ -> ephys.ClusteringTask --- - recording_filename: varchar(60) # filename where recording object is saved to + recording_filename: varchar(30) # filename where recording object is saved to params: longblob # finalized parameterset for 
this run execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration @@ -98,6 +100,7 @@ def key_source(self): & {"task_mode": "trigger"} & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' ) - ephys.Clustering + def make(self, key): """Triggers or imports clustering analysis.""" execution_time = datetime.utcnow() @@ -131,101 +134,121 @@ def make(self, key): # params = {**params, **ephys.get_recording_channels_details(key)} # params["fs"] = params["sample_rate"] - - preprocess_list = params.pop('PreProcessing_params') + preprocess_list = params.pop("PreProcessing_params") - # If else - if preprocess_list['Filter']: + # If else + # need to figure out ordering + if preprocess_list["Filter"]: oe_si_recording = sip.FilterRecording(oe_si_recording) - elif preprocess_list['BandpassFilter']: + elif preprocess_list["BandpassFilter"]: oe_si_recording = sip.BandpassFilterRecording(oe_si_recording) - elif preprocess_list['HighpassFilter']: + elif preprocess_list["HighpassFilter"]: oe_si_recording = sip.HighpassFilterRecording(oe_si_recording) - elif preprocess_list['NormalizeByQuantile']: + elif preprocess_list["NormalizeByQuantile"]: oe_si_recording = sip.NormalizeByQuantileRecording(oe_si_recording) - elif preprocess_list['Scale']: + elif preprocess_list["Scale"]: oe_si_recording = sip.ScaleRecording(oe_si_recording) - elif preprocess_list['Center']: + elif preprocess_list["Center"]: oe_si_recording = sip.CenterRecording(oe_si_recording) - elif preprocess_list['ZScore']: + elif preprocess_list["ZScore"]: oe_si_recording = sip.ZScoreRecording(oe_si_recording) - elif preprocess_list['Whiten']: + elif preprocess_list["Whiten"]: oe_si_recording = sip.WhitenRecording(oe_si_recording) - elif preprocess_list['CommonReference']: + elif preprocess_list["CommonReference"]: oe_si_recording = sip.CommonReferenceRecording(oe_si_recording) - elif preprocess_list['PhaseShift']: + elif preprocess_list["PhaseShift"]: 
oe_si_recording = sip.PhaseShiftRecording(oe_si_recording) - elif preprocess_list['Rectify']: + elif preprocess_list["Rectify"]: oe_si_recording = sip.RectifyRecording(oe_si_recording) - elif preprocess_list['Clip']: + elif preprocess_list["Clip"]: oe_si_recording = sip.ClipRecording(oe_si_recording) - elif preprocess_list['BlankSaturation']: + elif preprocess_list["BlankSaturation"]: oe_si_recording = sip.BlankSaturationRecording(oe_si_recording) - elif preprocess_list['RemoveArtifacts']: + elif preprocess_list["RemoveArtifacts"]: oe_si_recording = sip.RemoveArtifactsRecording(oe_si_recording) - elif preprocess_list['RemoveBadChannels']: + elif preprocess_list["RemoveBadChannels"]: oe_si_recording = sip.RemoveBadChannelsRecording(oe_si_recording) - elif preprocess_list['ZeroChannelPad']: + elif preprocess_list["ZeroChannelPad"]: oe_si_recording = sip.ZeroChannelPadRecording(oe_si_recording) - elif preprocess_list['DeepInterpolation']: + elif preprocess_list["DeepInterpolation"]: oe_si_recording = sip.DeepInterpolationRecording(oe_si_recording) - elif preprocess_list['Resample']: + elif preprocess_list["Resample"]: oe_si_recording = sip.ResampleRecording(oe_si_recording) - + if acq_software == "SpikeGLX": # sglx_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) sglx_filepath = ephys.get_spikeglx_meta_filepath(key) # Create SI recording extractor object - sglx_si_recording = se.read_spikeglx(folder_path=sglx_filepath.parent) - + sglx_si_recording = se.read_spikeglx(folder_path=sglx_filepath.parent) + channels_details = ephys.get_recording_channels_details(key) - xy_coords = [list(i) for i in zip(channels_details['x_coords'],channels_details['y_coords'])] - - - # Create SI probe object - si_probe = pi.Probe(ndim=2, si_units='um') - si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 12}) - si_probe.create_auto_shape(probe_type='tip') - 
si_probe.set_device_channel_indices(channels_details['channel_ind']) + xy_coords = [ + list(i) + for i in zip(channels_details["x_coords"], channels_details["y_coords"]) + ] + + # Create SI probe object + si_probe = pi.Probe(ndim=2, si_units="um") + si_probe.set_contacts( + positions=xy_coords, shapes="square", shape_params={"width": 12} + ) + si_probe.create_auto_shape(probe_type="tip") + si_probe.set_device_channel_indices(channels_details["channel_ind"]) sglx_si_recording.set_probe(probe=si_probe) # run preprocessing and save results to output folder - sglx_si_recording_filtered = sip.bandpass_filter(sglx_si_recording, freq_min=300, freq_max=6000) + sglx_si_recording_filtered = sip.bandpass_filter( + sglx_si_recording, freq_min=300, freq_max=6000 + ) # sglx_recording_cmr = sip.common_reference(sglx_si_recording_filtered, reference="global", operator="median") - save_file_name = 'si_recording.pkl' + save_file_name = "si_recording.pkl" save_file_path = kilosort_dir / save_file_name sglx_si_recording_filtered.dump_to_pickle(file_path=save_file_path) - elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) - oe_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) - + oe_session_full_path = find_full_path( + ephys.get_ephys_root_data_dir(), ephys.get_session_directory(key) + ) + assert len(oe_probe.recording_info["recording_files"]) == 1 - stream_name = os.path.split(oe_probe.recording_info['recording_files'][0])[1] + stream_name = os.path.split(oe_probe.recording_info["recording_files"][0])[ + 1 + ] # Create SI recording extractor object - # oe_si_recording = se.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) - oe_si_recording = se.read_openephys(folder_path=oe_session_full_path, stream_name=stream_name) + # oe_si_recording = se.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) + oe_si_recording = se.read_openephys( + 
folder_path=oe_session_full_path, stream_name=stream_name + ) channels_details = ephys.get_recording_channels_details(key) - xy_coords = [list(i) for i in zip(channels_details['x_coords'],channels_details['y_coords'])] - - # Create SI probe object - si_probe = pi.Probe(ndim=2, si_units='um') - si_probe.set_contacts(positions=xy_coords, shapes='square', shape_params={'width': 12}) - si_probe.create_auto_shape(probe_type='tip') - si_probe.set_device_channel_indices(channels_details['channel_ind']) + xy_coords = [ + list(i) + for i in zip(channels_details["x_coords"], channels_details["y_coords"]) + ] + + # Create SI probe object + si_probe = pi.Probe(ndim=2, si_units="um") + si_probe.set_contacts( + positions=xy_coords, shapes="square", shape_params={"width": 12} + ) + si_probe.create_auto_shape(probe_type="tip") + si_probe.set_device_channel_indices(channels_details["channel_ind"]) oe_si_recording.set_probe(probe=si_probe) # run preprocessing and save results to output folder # Switch case to allow for specified preprocessing steps - oe_si_recording_filtered = sip.bandpass_filter(oe_si_recording, freq_min=300, freq_max=6000) - oe_recording_cmr = sip.common_reference(oe_si_recording_filtered, reference="global", operator="median") + oe_si_recording_filtered = sip.bandpass_filter( + oe_si_recording, freq_min=300, freq_max=6000 + ) + oe_recording_cmr = sip.common_reference( + oe_si_recording_filtered, reference="global", operator="median" + ) - save_file_name = 'si_recording.pkl' + save_file_name = "si_recording.pkl" save_file_path = kilosort_dir / save_file_name oe_si_recording_filtered.dump_to_pickle(file_path=save_file_path) @@ -240,15 +263,17 @@ def make(self, key): ).total_seconds() / 3600, } - ) - @schema -class ClusteringModule(dj.Imported): + ) + + +@schema +class SIClustering(dj.Imported): """A processing table to handle each clustering task.""" definition = """ -> PreProcessing --- - sorting_filename: varchar(60) # filename of saved sorting object + 
sorting_filename: varchar(30) # filename of saved sorting object execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration """ @@ -263,56 +288,56 @@ def make(self, key): ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("acq_software", "clustering_method") - params = (PreProcessing & key).fetch1("params") - recording_filename = (PreProcessing & key).fetch1("recording_filename") + params = (PreProcessing & key).fetch1("params") + recording_filename = (PreProcessing & key).fetch1("recording_filename") if acq_software == "SpikeGLX": # sglx_probe = ephys.get_openephys_probe_data(key) recording_fullpath = kilosort_dir / recording_filename - # sglx_si_recording = se.load_from_folder(recording_file) + # sglx_si_recording = se.load_from_folder(recording_file) sglx_si_recording = sic.load_extractor(recording_fullpath) # assert len(oe_probe.recording_info["recording_files"]) == 1 ## Assume that the worker process will trigger this sorting step - # - Will need to store/load the sorter_name, sglx_si_recording object etc. + # - Will need to store/load the sorter_name, sglx_si_recording object etc. 
# - Store in shared EC2 space accessible by all containers (needs to be mounted) - # - Load into the cloud init script, and + # - Load into the cloud init script, and # - Option A: Can call this function within a separate container within spike_sorting_worker - if clustering_method.startswith('kilosort2.5'): + if clustering_method.startswith("kilosort2.5"): sorter_name = "kilosort2_5" else: sorter_name = clustering_method - # sorting_kilosort = si.run_sorter( - # sorter_name = sorter_name, - # recording = sglx_si_recording, - # output_folder = kilosort_dir, - # docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", - # **params - # ) - sorting_save_path = kilosort_dir / 'sorting_kilosort.pkl' + sorting_kilosort = si.run_sorter( + sorter_name=sorter_name, + recording=sglx_si_recording, + output_folder=kilosort_dir, + docker_image=f"spikeinterface/{sorter_name}-compiled-base:latest", + **params, + ) + sorting_save_path = kilosort_dir / "sorting_kilosort.pkl" sorting_kilosort.dump_to_pickle(sorting_save_path) elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) - oe_si_recording = sic.load_extractor(recording_fullpath) + oe_si_recording = sic.load_extractor(recording_fullpath) assert len(oe_probe.recording_info["recording_files"]) == 1 - if clustering_method.startswith('kilosort2.5'): + if clustering_method.startswith("kilosort2.5"): sorter_name = "kilosort2_5" else: sorter_name = clustering_method - # sorting_kilosort = si.run_sorter( - # sorter_name = sorter_name, - # recording = oe_si_recording, - # output_folder = kilosort_dir, - # docker_image = f"spikeinterface/{sorter_name}-compiled-base:latest", - # **params - # ) - sorting_save_path = kilosort_dir / 'sorting_kilosort.pkl' + sorting_kilosort = si.run_sorter( + sorter_name=sorter_name, + recording=oe_si_recording, + output_folder=kilosort_dir, + docker_image=f"spikeinterface/{sorter_name}-compiled-base:latest", + **params, + ) + sorting_save_path = kilosort_dir / 
"sorting_kilosort.pkl" sorting_kilosort.dump_to_pickle(sorting_save_path) - # sorting_kilosort.save(folder=kilosort_dir, n_jobs=20, chunk_size=30000) self.insert1( { **key, + "sorting_filename": list(sorting_save_path.parts)[-1], "execution_time": execution_time, "execution_duration": ( datetime.utcnow() - execution_time @@ -321,6 +346,7 @@ def make(self, key): } ) + @schema class PostProcessing(dj.Imported): """A processing table to handle each clustering task.""" @@ -345,53 +371,100 @@ def make(self, key): params = (PreProcessing & key).fetch1("params") if acq_software == "SpikeGLX": - sorting_file = kilosort_dir / 'sorting_kilosort' - recording_file = kilosort_dir / 'sglx_recording_cmr.json' - sglx_si_recording = sic.load_extractor(recording_file) + recording_filename = (PreProcessing & key).fetch1("recording_filename") + sorting_file = kilosort_dir / "sorting_kilosort" + filtered_recording_file = kilosort_dir / recording_filename + sglx_si_recording_filtered = sic.load_extractor(recording_file) sorting_kilosort = sic.load_extractor(sorting_file) - we_kilosort = si.WaveformExtractor.create(sglx_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True) + we_kilosort = si.WaveformExtractor.create( + sglx_si_recording_filtered, + sorting_kilosort, + "waveforms", + remove_if_exists=True, + ) + we_kilosort.set_params(ms_before=3.0, ms_after=4.0, max_spikes_per_unit=500) we_kilosort.run_extract_waveforms(n_jobs=-1, chunk_size=30000) unit_id0 = sorting_kilosort.unit_ids[0] waveforms = we_kilosort.get_waveforms(unit_id0) template = we_kilosort.get_template(unit_id0) snrs = si.compute_snrs(we_kilosort) - - # QC Metrics - si_violations_ratio, isi_violations_rate, isi_violations_count = si.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) - metrics = si.compute_quality_metrics(we_kilosort, metric_names=["firing_rate","snr","presence_ratio","isi_violation", - "num_spikes","amplitude_cutoff","amplitude_median","sliding_rp_violation","rp_violation","drift"]) 
+ # QC Metrics + ( + si_violations_ratio, + isi_violations_rate, + isi_violations_count, + ) = si.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) + metrics = si.compute_quality_metrics( + we_kilosort, + metric_names=[ + "firing_rate", + "snr", + "presence_ratio", + "isi_violation", + "num_spikes", + "amplitude_cutoff", + "amplitude_median", + "sliding_rp_violation", + "rp_violation", + "drift", + ], + ) sie.export_report(we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000) # ["firing_rate","snr","presence_ratio","isi_violation", # "number_violation","amplitude_cutoff","isolation_distance","l_ratio","d_prime","nn_hit_rate", # "nn_miss_rate","silhouette_core","cumulative_drift","contamination_rate"]) - - we_kilosort.save_to_folder('we_kilosort',kilosort_dir, n_jobs=-1, chunk_size=30000) - + we_savedir = kilosort_dir / "we_kilosort" + we_kilosort.save(we_savedir, n_jobs=-1, chunk_size=30000) elif acq_software == "Open Ephys": - sorting_file = kilosort_dir / 'sorting_kilosort' - recording_file = kilosort_dir / 'sglx_recording_cmr.json' + sorting_file = kilosort_dir / "sorting_kilosort" + recording_file = kilosort_dir / "sglx_recording_cmr.json" sglx_si_recording = sic.load_extractor(recording_file) sorting_kilosort = sic.load_extractor(sorting_file) - we_kilosort = si.WaveformExtractor.create(sglx_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True) + we_kilosort = si.WaveformExtractor.create( + sglx_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True + ) + we_kilosort.set_params(ms_before=3.0, ms_after=4.0, max_spikes_per_unit=500) we_kilosort.run_extract_waveforms(n_jobs=-1, chunk_size=30000) unit_id0 = sorting_kilosort.unit_ids[0] waveforms = we_kilosort.get_waveforms(unit_id0) template = we_kilosort.get_template(unit_id0) snrs = si.compute_snrs(we_kilosort) - - # QC Metrics - si_violations_ratio, isi_violations_rate, isi_violations_count = si.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) - metrics = 
si.compute_quality_metrics(we_kilosort, metric_names=["firing_rate","snr","presence_ratio","isi_violation", - "num_spikes","amplitude_cutoff","amplitude_median","sliding_rp_violation","rp_violation","drift"]) + # QC Metrics + # Apply waveform extractor extensions + spike_locations = si.compute_spike_locations(we_kilosort) + spike_amplitudes = si.compute_spike_amplitudes(we_kilosort) + unit_locations = si.compute_unit_locations(we_kilosort) + template_metrics = si.compute_template_metrics(we_kilosort) + noise_levels = si.compute_noise_levels(we_kilosort) + drift_metrics = si.compute_drift_metrics(we_kilosort) + + (isi_violations_ratio, isi_violations_count) = si.compute_isi_violations( + we_kilosort, isi_threshold_ms=1.5 + ) + (isi_histograms, bins) = si.compute_isi_histograms(we_kilosort) + metrics = si.compute_quality_metrics( + we_kilosort, + metric_names=[ + "firing_rate", + "snr", + "presence_ratio", + "isi_violation", + "num_spikes", + "amplitude_cutoff", + "amplitude_median", + # "sliding_rp_violation", + "rp_violation", + "drift", + ], + ) sie.export_report(we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000) - we_kilosort.save_to_folder('we_kilosort',kilosort_dir, n_jobs=-1, chunk_size=30000) - + we_kilosort.save("we_kilosort", kilosort_dir, n_jobs=-1, chunk_size=30000) self.insert1( { @@ -409,8 +482,9 @@ def make(self, key): {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True ) + ## Example SI parameter set -''' +""" {'detect_threshold': 6, 'projection_threshold': [10, 4], 'preclust_threshold': 8, @@ -447,4 +521,4 @@ def make(self, key): 'ZeroChannelPad': False, 'DeepInterpolation': False, 'Resample': False}} -''' \ No newline at end of file +""" From 1febd7e05111aa19679c64f86947973e0b533ebf Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Fri, 17 Feb 2023 19:37:12 -0600 Subject: [PATCH 031/204] post processing waveform extractor extensions --- element_array_ephys/spike_sorting/si_clustering.py | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 13f129d8..a018119d 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -441,8 +441,9 @@ def make(self, key): unit_locations = si.compute_unit_locations(we_kilosort) template_metrics = si.compute_template_metrics(we_kilosort) noise_levels = si.compute_noise_levels(we_kilosort) + pcs = si.compute_principal_components(we_kilosort) drift_metrics = si.compute_drift_metrics(we_kilosort) - + template_similarity = si.compute_tempoate_similarity(we_kilosort) (isi_violations_ratio, isi_violations_count) = si.compute_isi_violations( we_kilosort, isi_threshold_ms=1.5 ) From a478e0679ee5ae3747a324880415f090791cb868 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Mon, 20 Feb 2023 16:21:23 -0600 Subject: [PATCH 032/204] Fix data loading bug related to cluster_groups and KSLabel df key --- element_array_ephys/readers/kilosort.py | 42 +++++++++++++------------ 1 file changed, 22 insertions(+), 20 deletions(-) diff --git a/element_array_ephys/readers/kilosort.py b/element_array_ephys/readers/kilosort.py index abddee74..e88ba335 100644 --- a/element_array_ephys/readers/kilosort.py +++ b/element_array_ephys/readers/kilosort.py @@ -1,19 +1,16 @@ -import logging -import pathlib -import re -from datetime import datetime from os import path - -import numpy as np +from datetime import datetime +import pathlib import pandas as pd - +import numpy as np +import re +import logging from .utils import convert_to_number log = logging.getLogger(__name__) class Kilosort: - _kilosort_core_files = [ "params.py", "amplitudes.npy", @@ -118,7 +115,8 @@ def _load(self): # Read the Cluster Groups for cluster_pattern, cluster_col_name in zip( - ["cluster_group.*", "cluster_KSLabel.*"], ["group", "KSLabel"] + ["cluster_group.*", "cluster_KSLabel.*", "cluster_group.*"], + 
["group", "KSLabel", "KSLabel"], ): try: cluster_file = next(self._kilosort_dir.glob(cluster_pattern)) @@ -127,22 +125,26 @@ def _load(self): else: cluster_file_suffix = cluster_file.suffix assert cluster_file_suffix in (".tsv", ".xlsx") - break + + if cluster_file_suffix == ".tsv": + df = pd.read_csv(cluster_file, sep="\t", header=0) + elif cluster_file_suffix == ".xlsx": + df = pd.read_excel(cluster_file, engine="openpyxl") + else: + df = pd.read_csv(cluster_file, delimiter="\t") + + try: + self._data["cluster_groups"] = np.array(df[cluster_col_name].values) + self._data["cluster_ids"] = np.array(df["cluster_id"].values) + except KeyError: + continue + else: + break else: raise FileNotFoundError( 'Neither "cluster_groups" nor "cluster_KSLabel" file found!' ) - if cluster_file_suffix == ".tsv": - df = pd.read_csv(cluster_file, sep="\t", header=0) - elif cluster_file_suffix == ".xlsx": - df = pd.read_excel(cluster_file, engine="openpyxl") - else: - df = pd.read_csv(cluster_file, delimiter="\t") - - self._data["cluster_groups"] = np.array(df[cluster_col_name].values) - self._data["cluster_ids"] = np.array(df["cluster_id"].values) - def get_best_channel(self, unit): template_idx = self.data["spike_templates"][ np.where(self.data["spike_clusters"] == unit)[0][0] From 634761ddad17bfd024b84567881499fba5e2d46e Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Tue, 21 Feb 2023 18:13:02 -0600 Subject: [PATCH 033/204] waveform extraction wip --- element_array_ephys/ephys_no_curation.py | 109 ++++++++++-------- .../spike_sorting/si_clustering.py | 4 +- 2 files changed, 63 insertions(+), 50 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index f4ed4b55..69afaea2 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1,17 +1,17 @@ -import gc -import importlib -import inspect +import datajoint as dj import pathlib import re -from decimal import Decimal - -import 
datajoint as dj import numpy as np +import inspect +import importlib +import gc +from decimal import Decimal import pandas as pd -from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory -from . import ephys_report, get_logger, probe -from .readers import kilosort, openephys, spikeglx +from element_interface.utils import find_root_directory, find_full_path, dict_to_uuid +from .readers import spikeglx, kilosort, openephys +from element_array_ephys import probe, get_logger, ephys_report + log = get_logger(__name__) @@ -19,6 +19,9 @@ _linking_module = None +import spikeinterface +import spikeinterface.full as si + def activate( ephys_schema_name: str, @@ -32,7 +35,7 @@ def activate( Args: ephys_schema_name (str): A string containing the name of the ephys schema. - probe_schema_name (str): A string containing the name of the probe schema. + probe_schema_name (str): A string containing the name of the probe scehma. create_schema (bool): If True, schema will be created in the database. create_tables (bool): If True, tables related to the schema will be created in the database. linking_module (str): A string containing the module name or module containing the required dependencies to activate the schema. @@ -129,7 +132,7 @@ class AcquisitionSoftware(dj.Lookup): """ definition = """ # Name of software used for recording of neuropixels probes - SpikeGLX or Open Ephys - acq_software: varchar(24) + acq_software: varchar(24) """ contents = zip(["SpikeGLX", "Open Ephys"]) @@ -272,11 +275,11 @@ class EphysRecording(dj.Imported): definition = """ # Ephys recording from a probe insertion for a given session. 
- -> ProbeInsertion + -> ProbeInsertion --- -> probe.ElectrodeConfig -> AcquisitionSoftware - sampling_rate: float # (Hz) + sampling_rate: float # (Hz) recording_datetime: datetime # datetime of the recording from this probe recording_duration: float # (seconds) duration of the recording from this probe """ @@ -315,8 +318,8 @@ def make(self, key): break else: raise FileNotFoundError( - "Ephys recording data not found!" - " Neither SpikeGLX nor Open Ephys recording files found" + f"Ephys recording data not found!" + f" Neither SpikeGLX nor Open Ephys recording files found" ) supported_probe_types = probe.ProbeType.fetch("probe_type") @@ -471,9 +474,9 @@ class Electrode(dj.Part): definition = """ -> master - -> probe.ElectrodeConfig.Electrode + -> probe.ElectrodeConfig.Electrode --- - lfp: longblob # (uV) recorded lfp at this electrode + lfp: longblob # (uV) recorded lfp at this electrode """ # Only store LFP for every 9th channel, due to high channel density, @@ -614,14 +617,14 @@ class ClusteringParamSet(dj.Lookup): ClusteringMethod (dict): ClusteringMethod primary key. paramset_desc (varchar(128) ): Description of the clustering parameter set. param_set_hash (uuid): UUID hash for the parameter set. - params (longblob): Set of clustering parameters + params (longblob) """ definition = """ # Parameter set to be used in a clustering procedure paramset_idx: smallint --- - -> ClusteringMethod + -> ClusteringMethod paramset_desc: varchar(128) param_set_hash: uuid unique index (param_set_hash) @@ -724,18 +727,15 @@ class ClusteringTask(dj.Manual): """ @classmethod - def infer_output_dir( - cls, key, relative: bool = False, mkdir: bool = False - ) -> pathlib.Path: + def infer_output_dir(cls, key, relative: bool = False, mkdir: bool = False): """Infer output directory if it is not provided. Args: key (dict): ClusteringTask primary key. 
Returns: - Expected clustering_output_dir based on the following convention: - processed_dir / session_dir / probe_{insertion_number} / {clustering_method}_{paramset_idx} - e.g.: sub4/sess1/probe_2/kilosort2_0 + Pathlib.Path: Expected clustering_output_dir based on the following convention: processed_dir / session_dir / probe_{insertion_number} / {clustering_method}_{paramset_idx} + e.g.: sub4/sess1/probe_2/kilosort2_0 """ processed_dir = pathlib.Path(get_processed_root_data_dir()) session_dir = find_full_path( @@ -802,14 +802,14 @@ class Clustering(dj.Imported): Attributes: ClusteringTask (foreign key): ClusteringTask primary key. clustering_time (datetime): Time when clustering results are generated. - package_version (varchar(16): Package version used for a clustering analysis. + package_version (varchar(16) ): Package version used for a clustering analysis. """ definition = """ # Clustering Procedure -> ClusteringTask --- - clustering_time: datetime # time of generation of this set of clustering results + clustering_time: datetime # time of generation of this set of clustering results package_version='': varchar(16) """ @@ -850,10 +850,6 @@ def make(self, key): spikeglx_meta_filepath.parent ) spikeglx_recording.validate_file("ap") - run_CatGT = ( - params.pop("run_CatGT", True) - and "_tcat." not in spikeglx_meta_filepath.stem - ) if clustering_method.startswith("pykilosort"): kilosort_triggering.run_pykilosort( @@ -874,7 +870,7 @@ def make(self, key): ks_output_dir=kilosort_dir, params=params, KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=run_CatGT, + run_CatGT=True, ) run_kilosort.run_modules() elif acq_software == "Open Ephys": @@ -929,7 +925,7 @@ class CuratedClustering(dj.Imported): definition = """ # Clustering results of the spike sorting step. 
- -> Clustering + -> Clustering """ class Unit(dj.Part): @@ -946,7 +942,7 @@ class Unit(dj.Part): spike_depths (longblob): Array of depths associated with each spike, relative to each spike. """ - definition = """ + definition = """ # Properties of a given unit from a round of clustering (and curation) -> master unit: int @@ -956,7 +952,7 @@ class Unit(dj.Part): spike_count: int # how many spikes in this recording for this unit spike_times: longblob # (s) spike times of this unit, relative to the start of the EphysRecording spike_sites : longblob # array of electrode associated with each spike - spike_depths=null : longblob # (um) array of depths associated with each spike, relative to the (0, 0) of the probe + spike_depths=null : longblob # (um) array of depths associated with each spike, relative to the (0, 0) of the probe """ def make(self, key): @@ -1080,8 +1076,8 @@ class Waveform(dj.Part): # Spike waveforms and their mean across spikes for the given unit -> master -> CuratedClustering.Unit - -> probe.ElectrodeConfig.Electrode - --- + -> probe.ElectrodeConfig.Electrode + --- waveform_mean: longblob # (uV) mean waveform across spikes of the given unit waveforms=null: longblob # (uV) (spike x sample) waveforms of a sampling of spikes at the given electrode for the given unit """ @@ -1109,15 +1105,32 @@ def make(self, key): for u in (CuratedClustering.Unit & key).fetch(as_dict=True, order_by="unit") } + waveforms_folder = kilosort_dir / "we_kilosort" + + waveforms_folder = kilosort_dir.rglob(*waveform) + # Mean waveforms need to be extracted from waveform extractor object + if (waveforms_folder).exists(): + we_kilosort = si.load_waveforms(waveforms_folder) + unit_waveforms = we_kilosort.get_all_templates() + + def yield_unit_waveforms(): + for unit_no, unit_waveform in zip( + kilosort_dataset.data["cluster_ids"], unit_waveforms + ): + unit_peak_waveform = {} + unit_electrode_waveforms = [] + + if unit_no in units: + unit_waveform = 
we_kilosort.get_waveforms(unit_id=unit_no) + mean_templates = we_kilosort.get_templates(unit_id=unit_no) + if (kilosort_dir / "mean_waveforms.npy").exists(): unit_waveforms = np.load( kilosort_dir / "mean_waveforms.npy" ) # unit x channel x sample def yield_unit_waveforms(): - for unit_no, unit_waveform in zip( - kilosort_dataset.data["cluster_ids"], unit_waveforms - ): + for unit_no, unit_waveform in zip(cluster_ids, unit_waveforms): unit_peak_waveform = {} unit_electrode_waveforms = [] if unit_no in units: @@ -1207,7 +1220,7 @@ class QualityMetrics(dj.Imported): definition = """ # Clusters and waveforms metrics - -> CuratedClustering + -> CuratedClustering """ class Cluster(dj.Part): @@ -1232,26 +1245,26 @@ class Cluster(dj.Part): contamination_rate (float): Frequency of spikes in the refractory period. """ - definition = """ + definition = """ # Cluster metrics for a particular unit -> master -> CuratedClustering.Unit --- - firing_rate=null: float # (Hz) firing rate for a unit + firing_rate=null: float # (Hz) firing rate for a unit snr=null: float # signal-to-noise ratio for a unit presence_ratio=null: float # fraction of time in which spikes are present isi_violation=null: float # rate of ISI violation as a fraction of overall rate number_violation=null: int # total number of ISI violations amplitude_cutoff=null: float # estimate of miss rate based on amplitude histogram isolation_distance=null: float # distance to nearest cluster in Mahalanobis space - l_ratio=null: float # + l_ratio=null: float # d_prime=null: float # Classification accuracy based on LDA nn_hit_rate=null: float # Fraction of neighbors for target cluster that are also in target cluster nn_miss_rate=null: float # Fraction of neighbors outside target cluster that are in target cluster silhouette_score=null: float # Standard metric for cluster overlap max_drift=null: float # Maximum change in spike depth throughout recording - cumulative_drift=null: float # Cumulative change in spike depth 
throughout recording - contamination_rate=null: float # + cumulative_drift=null: float # Cumulative change in spike depth throughout recording + contamination_rate=null: float # """ class Waveform(dj.Part): @@ -1268,10 +1281,10 @@ class Waveform(dj.Part): recovery_slope (float): Slope of the regression line fit to first 30 microseconds from peak to tail. spread (float): The range with amplitude over 12-percent of maximum amplitude along the probe. velocity_above (float): inverse velocity of waveform propagation from soma to the top of the probe. - velocity_below (float): inverse velocity of waveform propagation from soma toward the bottom of the probe. + velocity_below (float) inverse velocity of waveform propagation from soma toward the bottom of the probe. """ - definition = """ + definition = """ # Waveform metrics for a particular unit -> master -> CuratedClustering.Unit diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index a018119d..be50356b 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -464,8 +464,8 @@ def make(self, key): ], ) sie.export_report(we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000) - - we_kilosort.save("we_kilosort", kilosort_dir, n_jobs=-1, chunk_size=30000) + we_savedir = kilosort_dir / "we_kilosort" + we_kilosort.save(we_savedir, n_jobs=-1, chunk_size=30000) self.insert1( { From ff0dfee68bc45fbc43e42dec19541473ec9090e0 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Wed, 22 Feb 2023 20:08:36 -0600 Subject: [PATCH 034/204] modification to handle spike interface waveforms --- element_array_ephys/ephys_no_curation.py | 84 ++++++++++++++++++++---- 1 file changed, 70 insertions(+), 14 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 69afaea2..85ecb1a7 100644 --- a/element_array_ephys/ephys_no_curation.py +++ 
b/element_array_ephys/ephys_no_curation.py @@ -1105,13 +1105,14 @@ def make(self, key): for u in (CuratedClustering.Unit & key).fetch(as_dict=True, order_by="unit") } - waveforms_folder = kilosort_dir / "we_kilosort" + waveforms_folder = [ + f for f in kilosort_dir.parent.rglob(r"*/waveforms*") if f.is_dir() + ] - waveforms_folder = kilosort_dir.rglob(*waveform) - # Mean waveforms need to be extracted from waveform extractor object - if (waveforms_folder).exists(): - we_kilosort = si.load_waveforms(waveforms_folder) - unit_waveforms = we_kilosort.get_all_templates() + if (kilosort_dir / "mean_waveforms.npy").exists(): + unit_waveforms = np.load( + kilosort_dir / "mean_waveforms.npy" + ) # unit x channel x sample def yield_unit_waveforms(): for unit_no, unit_waveform in zip( @@ -1119,18 +1120,46 @@ def yield_unit_waveforms(): ): unit_peak_waveform = {} unit_electrode_waveforms = [] - if unit_no in units: - unit_waveform = we_kilosort.get_waveforms(unit_id=unit_no) - mean_templates = we_kilosort.get_templates(unit_id=unit_no) + for channel, channel_waveform in zip( + kilosort_dataset.data["channel_map"], unit_waveform + ): + unit_electrode_waveforms.append( + { + **units[unit_no], + **channel2electrodes[channel], + "waveform_mean": channel_waveform, + } + ) + if ( + channel2electrodes[channel]["electrode"] + == units[unit_no]["electrode"] + ): + unit_peak_waveform = { + **units[unit_no], + "peak_electrode_waveform": channel_waveform, + } + yield unit_peak_waveform, unit_electrode_waveforms - if (kilosort_dir / "mean_waveforms.npy").exists(): - unit_waveforms = np.load( - kilosort_dir / "mean_waveforms.npy" - ) # unit x channel x sample + # Spike interface mean and peak waveform extraction from we object + + elif len(waveforms_folder) > 0 & (waveforms_folder[0]).exists(): + we_kilosort = si.load_waveforms(waveforms_folder[0].parent) + unit_templates = we_kilosort.get_all_templates() + unit_waveforms = np.reshape( + unit_templates, + ( + unit_templates.shape[1], + 
unit_templates.shape[3], + unit_templates.shape[2], + ), + ) + # Approach assumes unit_waveforms was generated correctly (templates are actually the same as mean_waveforms) def yield_unit_waveforms(): - for unit_no, unit_waveform in zip(cluster_ids, unit_waveforms): + for unit_no, unit_waveform in zip( + kilosort_dataset.data["cluster_ids"], unit_waveforms + ): unit_peak_waveform = {} unit_electrode_waveforms = [] if unit_no in units: @@ -1154,6 +1183,33 @@ def yield_unit_waveforms(): } yield unit_peak_waveform, unit_electrode_waveforms + # Approach not using spike interface templates (ie. taking mean of each unit waveform) + # def yield_unit_waveforms(): + # for unit_id in we_kilosort.unit_ids: + # unit_waveform = np.mean(we_kilosort.get_waveforms(unit_id), 0) + # unit_peak_waveform = {} + # unit_electrode_waveforms = [] + # if unit_id in units: + # for channel, channel_waveform in zip( + # kilosort_dataset.data["channel_map"], unit_waveform + # ): + # unit_electrode_waveforms.append( + # { + # **units[unit_id], + # **channel2electrodes[channel], + # "waveform_mean": channel_waveform, + # } + # ) + # if ( + # channel2electrodes[channel]["electrode"] + # == units[unit_id]["electrode"] + # ): + # unit_peak_waveform = { + # **units[unit_id], + # "peak_electrode_waveform": channel_waveform, + # } + # yield unit_peak_waveform, unit_electrode_waveforms + else: if acq_software == "SpikeGLX": spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) From cb31229e89d6599c71647a3c7bb34e2498dcd192 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Wed, 22 Feb 2023 20:11:37 -0600 Subject: [PATCH 035/204] adjust post processing --- .../spike_sorting/si_clustering.py | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index be50356b..84f26644 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ 
b/element_array_ephys/spike_sorting/si_clustering.py @@ -130,10 +130,15 @@ def make(self, key): clustering_method in _supported_kilosort_versions ), f'Clustering_method "{clustering_method}" is not supported' + if clustering_method.startswith("kilosort2.5"): + sorter_name = "kilosort2_5" + else: + sorter_name = clustering_method # add additional probe-recording and channels details into `params` # params = {**params, **ephys.get_recording_channels_details(key)} # params["fs"] = params["sample_rate"] + default_params = si.get_default_sorter_params(sorter_name) preprocess_list = params.pop("PreProcessing_params") # If else @@ -406,7 +411,7 @@ def make(self, key): "num_spikes", "amplitude_cutoff", "amplitude_median", - "sliding_rp_violation", + # "sliding_rp_violation", "rp_violation", "drift", ], @@ -436,14 +441,14 @@ def make(self, key): # QC Metrics # Apply waveform extractor extensions - spike_locations = si.compute_spike_locations(we_kilosort) - spike_amplitudes = si.compute_spike_amplitudes(we_kilosort) - unit_locations = si.compute_unit_locations(we_kilosort) - template_metrics = si.compute_template_metrics(we_kilosort) - noise_levels = si.compute_noise_levels(we_kilosort) - pcs = si.compute_principal_components(we_kilosort) - drift_metrics = si.compute_drift_metrics(we_kilosort) - template_similarity = si.compute_tempoate_similarity(we_kilosort) + _ = si.compute_spike_locations(we_kilosort) + _ = si.compute_spike_amplitudes(we_kilosort) + _ = si.compute_unit_locations(we_kilosort) + _ = si.compute_template_metrics(we_kilosort) + _ = si.compute_noise_levels(we_kilosort) + _ = si.compute_principal_components(we_kilosort) + _ = si.compute_drift_metrics(we_kilosort) + _ = si.compute_tempoate_similarity(we_kilosort) (isi_violations_ratio, isi_violations_count) = si.compute_isi_violations( we_kilosort, isi_threshold_ms=1.5 ) From 6098421e9037017be02010672402efd885ce5b24 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Mon, 6 Mar 2023 11:56:38 -0600 Subject: 
[PATCH 036/204] bugfix in postprocessing definition --- element_array_ephys/spike_sorting/si_clustering.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 84f26644..f3fd4c1d 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -10,7 +10,7 @@ - create recording extractor and link it to a probe - bandpass filtering - common mode referencing -2. ClusteringModule - kilosort (MATLAB) - requires GPU and docker/singularity containers +2. SIClustering - kilosort (MATLAB) - requires GPU and docker/singularity containers - supports kilosort 2.0, 2.5 or 3.0 (https://github.com/MouseLand/Kilosort.git) 3. PostProcessing - for postprocessing steps (no GPU required) - create waveform extractor object @@ -357,7 +357,7 @@ class PostProcessing(dj.Imported): """A processing table to handle each clustering task.""" definition = """ - -> ClusteringModule + -> SIClustering --- execution_time: datetime # datetime of the start of this step execution_duration: float # (hour) execution duration @@ -426,11 +426,11 @@ def make(self, key): elif acq_software == "Open Ephys": sorting_file = kilosort_dir / "sorting_kilosort" recording_file = kilosort_dir / "sglx_recording_cmr.json" - sglx_si_recording = sic.load_extractor(recording_file) + oe_si_recording = sic.load_extractor(recording_file) sorting_kilosort = sic.load_extractor(sorting_file) we_kilosort = si.WaveformExtractor.create( - sglx_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True + oe_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True ) we_kilosort.set_params(ms_before=3.0, ms_after=4.0, max_spikes_per_unit=500) we_kilosort.run_extract_waveforms(n_jobs=-1, chunk_size=30000) @@ -472,6 +472,9 @@ def make(self, key): we_savedir = kilosort_dir / "we_kilosort" we_kilosort.save(we_savedir, n_jobs=-1, 
chunk_size=30000) + metrics_savefile = kilosort_dir / "metrics.csv" + metrics.to_csv(metrics_savefile) + self.insert1( { **key, From 60064646c1d3c65d2d37173f0ef686fa5a108387 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Tue, 7 Mar 2023 18:17:35 -0600 Subject: [PATCH 037/204] add SI ibl destriping and catGT implementations --- .../spike_sorting/si_clustering.py | 108 +++++++++++------- 1 file changed, 69 insertions(+), 39 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index f3fd4c1d..cb4e1858 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -141,45 +141,6 @@ def make(self, key): default_params = si.get_default_sorter_params(sorter_name) preprocess_list = params.pop("PreProcessing_params") - # If else - # need to figure out ordering - if preprocess_list["Filter"]: - oe_si_recording = sip.FilterRecording(oe_si_recording) - elif preprocess_list["BandpassFilter"]: - oe_si_recording = sip.BandpassFilterRecording(oe_si_recording) - elif preprocess_list["HighpassFilter"]: - oe_si_recording = sip.HighpassFilterRecording(oe_si_recording) - elif preprocess_list["NormalizeByQuantile"]: - oe_si_recording = sip.NormalizeByQuantileRecording(oe_si_recording) - elif preprocess_list["Scale"]: - oe_si_recording = sip.ScaleRecording(oe_si_recording) - elif preprocess_list["Center"]: - oe_si_recording = sip.CenterRecording(oe_si_recording) - elif preprocess_list["ZScore"]: - oe_si_recording = sip.ZScoreRecording(oe_si_recording) - elif preprocess_list["Whiten"]: - oe_si_recording = sip.WhitenRecording(oe_si_recording) - elif preprocess_list["CommonReference"]: - oe_si_recording = sip.CommonReferenceRecording(oe_si_recording) - elif preprocess_list["PhaseShift"]: - oe_si_recording = sip.PhaseShiftRecording(oe_si_recording) - elif preprocess_list["Rectify"]: - oe_si_recording = sip.RectifyRecording(oe_si_recording) - elif 
preprocess_list["Clip"]: - oe_si_recording = sip.ClipRecording(oe_si_recording) - elif preprocess_list["BlankSaturation"]: - oe_si_recording = sip.BlankSaturationRecording(oe_si_recording) - elif preprocess_list["RemoveArtifacts"]: - oe_si_recording = sip.RemoveArtifactsRecording(oe_si_recording) - elif preprocess_list["RemoveBadChannels"]: - oe_si_recording = sip.RemoveBadChannelsRecording(oe_si_recording) - elif preprocess_list["ZeroChannelPad"]: - oe_si_recording = sip.ZeroChannelPadRecording(oe_si_recording) - elif preprocess_list["DeepInterpolation"]: - oe_si_recording = sip.DeepInterpolationRecording(oe_si_recording) - elif preprocess_list["Resample"]: - oe_si_recording = sip.ResampleRecording(oe_si_recording) - if acq_software == "SpikeGLX": # sglx_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) sglx_filepath = ephys.get_spikeglx_meta_filepath(key) @@ -212,6 +173,8 @@ def make(self, key): save_file_path = kilosort_dir / save_file_name sglx_si_recording_filtered.dump_to_pickle(file_path=save_file_path) + sglx_si_recording = run_IBLdestriping(sglx_si_recording) + elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) oe_session_full_path = find_full_path( @@ -492,6 +455,73 @@ def make(self, key): ) +# def runPreProcessList(preprocess_list, recording): +# # If else +# # need to figure out ordering +# if preprocess_list["Filter"]: +# recording = sip.FilterRecording(recording) +# if preprocess_list["BandpassFilter"]: +# recording = sip.BandpassFilterRecording(recording) +# if preprocess_list["HighpassFilter"]: +# recording = sip.HighpassFilterRecording(recording) +# if preprocess_list["NormalizeByQuantile"]: +# recording = sip.NormalizeByQuantileRecording(recording) +# if preprocess_list["Scale"]: +# recording = sip.ScaleRecording(recording) +# if preprocess_list["Center"]: +# recording = sip.CenterRecording(recording) +# if preprocess_list["ZScore"]: +# recording = 
sip.ZScoreRecording(recording) +# if preprocess_list["Whiten"]: +# recording = sip.WhitenRecording(recording) +# if preprocess_list["CommonReference"]: +# recording = sip.CommonReferenceRecording(recording) +# if preprocess_list["PhaseShift"]: +# recording = sip.PhaseShiftRecording(recording) +# elif preprocess_list["Rectify"]: +# recording = sip.RectifyRecording(recording) +# elif preprocess_list["Clip"]: +# recording = sip.ClipRecording(recording) +# elif preprocess_list["BlankSaturation"]: +# recording = sip.BlankSaturationRecording(recording) +# elif preprocess_list["RemoveArtifacts"]: +# recording = sip.RemoveArtifactsRecording(recording) +# elif preprocess_list["RemoveBadChannels"]: +# recording = sip.RemoveBadChannelsRecording(recording) +# elif preprocess_list["ZeroChannelPad"]: +# recording = sip.ZeroChannelPadRecording(recording) +# elif preprocess_list["DeepInterpolation"]: +# recording = sip.DeepInterpolationRecording(recording) +# elif preprocess_list["Resample"]: +# recording = sip.ResampleRecording(recording) + + +def mimic_IBLdestriping_modified(recording): + # From SpikeInterface Implementation (https://spikeinterface.readthedocs.io/en/latest/how_to/analyse_neuropixels.html) + recording = si.highpass_filter(recording, freq_min=400.0) + bad_channel_ids, channel_labels = si.detect_bad_channels(recording) + # For IBL destriping interpolate bad channels + recording = recording.remove_channels(bad_channel_ids) + recording = si.phase_shift(recording) + recording = si.common_reference(recording, operator="median", reference="global") + return recording + +def mimic_IBLdestriping(recording): + # From International Brain Laboratory. “Spike sorting pipeline for the International Brain Laboratory”. 4 May 2022. 9 Jun 2022. 
+ recording = si.highpass_filter(recording, freq_min=400.0) + bad_channel_ids, channel_labels = si.detect_bad_channels(recording) + # For IBL destriping interpolate bad channels + recording = sip.interpolate_bad_channels(bad_channel_ids) + recording = si.phase_shift(recording) + recording = si.highpass_spatial_filter(recording, operator="median", reference="global") + # For IBL destriping use highpass_spatial_filter used instead of common reference + return recording + +def mimic_catGT(sglx_recording): + sglx_recording = si.phase_shift(sglx_recording) + sglx_recording = si.common_reference(sglx_recording, operator="median", reference="global") + return sglx_recording + ## Example SI parameter set """ {'detect_threshold': 6, From c050875c3f3d98e085c1a2d6aa591952df124632 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Tue, 7 Mar 2023 18:24:09 -0600 Subject: [PATCH 038/204] remove preprocess params list --- .../spike_sorting/si_clustering.py | 33 +++++++++++-------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index cb4e1858..81e5f0b3 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -139,7 +139,7 @@ def make(self, key): # params["fs"] = params["sample_rate"] default_params = si.get_default_sorter_params(sorter_name) - preprocess_list = params.pop("PreProcessing_params") + # preprocess_list = params.pop("PreProcessing_params") if acq_software == "SpikeGLX": # sglx_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) @@ -173,7 +173,7 @@ def make(self, key): save_file_path = kilosort_dir / save_file_name sglx_si_recording_filtered.dump_to_pickle(file_path=save_file_path) - sglx_si_recording = run_IBLdestriping(sglx_si_recording) + sglx_si_recording = mimic_catGT(sglx_si_recording) elif acq_software == "Open Ephys": 
oe_probe = ephys.get_openephys_probe_data(key) @@ -208,17 +208,17 @@ def make(self, key): oe_si_recording.set_probe(probe=si_probe) # run preprocessing and save results to output folder - # Switch case to allow for specified preprocessing steps - oe_si_recording_filtered = sip.bandpass_filter( - oe_si_recording, freq_min=300, freq_max=6000 - ) - oe_recording_cmr = sip.common_reference( - oe_si_recording_filtered, reference="global", operator="median" - ) - + # # Switch case to allow for specified preprocessing steps + # oe_si_recording_filtered = sip.bandpass_filter( + # oe_si_recording, freq_min=300, freq_max=6000 + # ) + # oe_recording_cmr = sip.common_reference( + # oe_si_recording_filtered, reference="global", operator="median" + # ) + oe_si_recording = mimic_IBLdestriping(oe_si_recording) save_file_name = "si_recording.pkl" save_file_path = kilosort_dir / save_file_name - oe_si_recording_filtered.dump_to_pickle(file_path=save_file_path) + oe_si_recording.dump_to_pickle(file_path=save_file_path) self.insert1( { @@ -506,6 +506,7 @@ def mimic_IBLdestriping_modified(recording): recording = si.common_reference(recording, operator="median", reference="global") return recording + def mimic_IBLdestriping(recording): # From International Brain Laboratory. “Spike sorting pipeline for the International Brain Laboratory”. 4 May 2022. 9 Jun 2022. 
recording = si.highpass_filter(recording, freq_min=400.0) @@ -513,15 +514,21 @@ def mimic_IBLdestriping(recording): # For IBL destriping interpolate bad channels recording = sip.interpolate_bad_channels(bad_channel_ids) recording = si.phase_shift(recording) - recording = si.highpass_spatial_filter(recording, operator="median", reference="global") # For IBL destriping use highpass_spatial_filter used instead of common reference + recording = si.highpass_spatial_filter( + recording, operator="median", reference="global" + ) return recording + def mimic_catGT(sglx_recording): sglx_recording = si.phase_shift(sglx_recording) - sglx_recording = si.common_reference(sglx_recording, operator="median", reference="global") + sglx_recording = si.common_reference( + sglx_recording, operator="median", reference="global" + ) return sglx_recording + ## Example SI parameter set """ {'detect_threshold': 6, From 4ea56c0f3d86bd41ad4d623cc31ef82d131f03a3 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Tue, 7 Mar 2023 18:28:18 -0600 Subject: [PATCH 039/204] preprocessing changes --- .../spike_sorting/si_clustering.py | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 81e5f0b3..33704081 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -138,7 +138,7 @@ def make(self, key): # params = {**params, **ephys.get_recording_channels_details(key)} # params["fs"] = params["sample_rate"] - default_params = si.get_default_sorter_params(sorter_name) + # default_params = si.get_default_sorter_params(sorter_name) # preprocess_list = params.pop("PreProcessing_params") if acq_software == "SpikeGLX": @@ -163,17 +163,15 @@ def make(self, key): si_probe.set_device_channel_indices(channels_details["channel_ind"]) sglx_si_recording.set_probe(probe=si_probe) - # run preprocessing and save 
results to output folder - sglx_si_recording_filtered = sip.bandpass_filter( - sglx_si_recording, freq_min=300, freq_max=6000 - ) + # # run preprocessing and save results to output folder + # sglx_si_recording_filtered = sip.bandpass_filter( + # sglx_si_recording, freq_min=300, freq_max=6000 + # ) # sglx_recording_cmr = sip.common_reference(sglx_si_recording_filtered, reference="global", operator="median") - + sglx_si_recording = mimic_catGT(sglx_si_recording) save_file_name = "si_recording.pkl" save_file_path = kilosort_dir / save_file_name - sglx_si_recording_filtered.dump_to_pickle(file_path=save_file_path) - - sglx_si_recording = mimic_catGT(sglx_si_recording) + sglx_si_recording.dump_to_pickle(file_path=save_file_path) elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) From 0a875794726cc3d4c481825ed5566ede22f46514 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Mon, 15 May 2023 18:04:32 -0500 Subject: [PATCH 040/204] Update requirements.txt --- requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 528f6349..0d47a42f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,5 +6,4 @@ plotly pyopenephys>=1.1.6 seaborn scikit-image -spikeinterface -nbformat>=4.2.0 \ No newline at end of file +nbformat>=4.2.0 From 22f1f65fe3773f2e4d8803cab15b694e3921d0a2 Mon Sep 17 00:00:00 2001 From: Sidharth Hulyalkar Date: Wed, 14 Jun 2023 15:36:07 -0500 Subject: [PATCH 041/204] fix spikeglx stream loading --- element_array_ephys/spike_sorting/si_clustering.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 33704081..f99d63d2 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -146,7 +146,12 @@ def make(self, key): sglx_filepath = 
ephys.get_spikeglx_meta_filepath(key) # Create SI recording extractor object - sglx_si_recording = se.read_spikeglx(folder_path=sglx_filepath.parent) + stream_name = sglx_filepath.stem.split(".", 1)[1] + sglx_si_recording = se.read_spikeglx( + folder_path=sglx_filepath.parent, + stream_name=stream_name, + stream_id=stream_name, + ) channels_details = ephys.get_recording_channels_details(key) xy_coords = [ From b62f16215efe91c9310383e99a72d7abdc8de983 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 11 Oct 2023 18:32:49 -0500 Subject: [PATCH 042/204] build: :pushpin: update requirements.txt & add env,.yml --- env.yml | 7 +++++++ requirements.txt | 3 ++- setup.py | 1 + 3 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 env.yml diff --git a/env.yml b/env.yml new file mode 100644 index 00000000..e9b3ce13 --- /dev/null +++ b/env.yml @@ -0,0 +1,7 @@ +channels: + - conda-forge + - defaults +dependencies: + - pip + - python>=3.7,<3.11 +name: element_array_ephys diff --git a/requirements.txt b/requirements.txt index 0d47a42f..721bfeda 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,10 @@ datajoint>=0.13 element-interface>=0.4.0 ipywidgets +nbformat>=4.2.0 openpyxl plotly pyopenephys>=1.1.6 seaborn scikit-image -nbformat>=4.2.0 +spikeinterface \ No newline at end of file diff --git a/setup.py b/setup.py index 31b9be61..cc538478 100644 --- a/setup.py +++ b/setup.py @@ -22,6 +22,7 @@ setup( name=pkg_name.replace("_", "-"), + python_requires='>=3.7, <3.11', version=__version__, # noqa F821 description="DataJoint Element for Extracellular Array Electrophysiology", long_description=long_description, From 849b576c8982b9231b4b1167740d2d1a2ad1cbdd Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 11 Oct 2023 18:36:51 -0500 Subject: [PATCH 043/204] refactor: :art: clean up spikeinterface import & remove unused import --- .../spike_sorting/si_clustering.py | 157 +++++++++--------- 1 file changed, 77 insertions(+), 80 deletions(-) diff --git 
a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index f99d63d2..2cb5bf2e 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -16,16 +16,12 @@ - create waveform extractor object - extract templates, waveforms and snrs - quality_metrics - - """ + import datajoint as dj import os from element_array_ephys import get_logger -from decimal import Decimal -import json -import numpy as np -from datetime import datetime, timedelta +from datetime import datetime from element_interface.utils import find_full_path from element_array_ephys.readers import ( @@ -34,13 +30,7 @@ ) import element_array_ephys.probe as probe -import spikeinterface -import spikeinterface.full as si -import spikeinterface.core as sic -import spikeinterface.extractors as se -import spikeinterface.exporters as sie -import spikeinterface.sorters as ss -import spikeinterface.preprocessing as sip +import spikeinterface as si import probeinterface as pi log = get_logger(__name__) @@ -138,7 +128,7 @@ def make(self, key): # params = {**params, **ephys.get_recording_channels_details(key)} # params["fs"] = params["sample_rate"] - # default_params = si.get_default_sorter_params(sorter_name) + # default_params = si.full.get_default_sorter_params(sorter_name) # preprocess_list = params.pop("PreProcessing_params") if acq_software == "SpikeGLX": @@ -147,7 +137,7 @@ def make(self, key): # Create SI recording extractor object stream_name = sglx_filepath.stem.split(".", 1)[1] - sglx_si_recording = se.read_spikeglx( + sglx_si_recording = si.extractors.read_spikeglx( folder_path=sglx_filepath.parent, stream_name=stream_name, stream_id=stream_name, @@ -169,10 +159,10 @@ def make(self, key): sglx_si_recording.set_probe(probe=si_probe) # # run preprocessing and save results to output folder - # sglx_si_recording_filtered = sip.bandpass_filter( + # sglx_si_recording_filtered = 
si.preprocessing.bandpass_filter( # sglx_si_recording, freq_min=300, freq_max=6000 # ) - # sglx_recording_cmr = sip.common_reference(sglx_si_recording_filtered, reference="global", operator="median") + # sglx_recording_cmr = si.preprocessing.common_reference(sglx_si_recording_filtered, reference="global", operator="median") sglx_si_recording = mimic_catGT(sglx_si_recording) save_file_name = "si_recording.pkl" save_file_path = kilosort_dir / save_file_name @@ -190,8 +180,8 @@ def make(self, key): ] # Create SI recording extractor object - # oe_si_recording = se.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) - oe_si_recording = se.read_openephys( + # oe_si_recording = si.extractors.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) + oe_si_recording = si.extractors.read_openephys( folder_path=oe_session_full_path, stream_name=stream_name ) @@ -212,10 +202,10 @@ def make(self, key): # run preprocessing and save results to output folder # # Switch case to allow for specified preprocessing steps - # oe_si_recording_filtered = sip.bandpass_filter( + # oe_si_recording_filtered = si.preprocessing.bandpass_filter( # oe_si_recording, freq_min=300, freq_max=6000 # ) - # oe_recording_cmr = sip.common_reference( + # oe_recording_cmr = si.preprocessing.common_reference( # oe_si_recording_filtered, reference="global", operator="median" # ) oe_si_recording = mimic_IBLdestriping(oe_si_recording) @@ -265,8 +255,8 @@ def make(self, key): if acq_software == "SpikeGLX": # sglx_probe = ephys.get_openephys_probe_data(key) recording_fullpath = kilosort_dir / recording_filename - # sglx_si_recording = se.load_from_folder(recording_file) - sglx_si_recording = sic.load_extractor(recording_fullpath) + # sglx_si_recording = si.extractors.load_from_folder(recording_file) + sglx_si_recording = si.core.load_extractor(recording_fullpath) # assert len(oe_probe.recording_info["recording_files"]) == 1 ## Assume that the worker 
process will trigger this sorting step @@ -278,7 +268,7 @@ def make(self, key): sorter_name = "kilosort2_5" else: sorter_name = clustering_method - sorting_kilosort = si.run_sorter( + sorting_kilosort = si.full.run_sorter( sorter_name=sorter_name, recording=sglx_si_recording, output_folder=kilosort_dir, @@ -289,13 +279,13 @@ def make(self, key): sorting_kilosort.dump_to_pickle(sorting_save_path) elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) - oe_si_recording = sic.load_extractor(recording_fullpath) + oe_si_recording = si.core.load_extractor(recording_fullpath) assert len(oe_probe.recording_info["recording_files"]) == 1 if clustering_method.startswith("kilosort2.5"): sorter_name = "kilosort2_5" else: sorter_name = clustering_method - sorting_kilosort = si.run_sorter( + sorting_kilosort = si.full.run_sorter( sorter_name=sorter_name, recording=oe_si_recording, output_folder=kilosort_dir, @@ -345,10 +335,10 @@ def make(self, key): recording_filename = (PreProcessing & key).fetch1("recording_filename") sorting_file = kilosort_dir / "sorting_kilosort" filtered_recording_file = kilosort_dir / recording_filename - sglx_si_recording_filtered = sic.load_extractor(recording_file) - sorting_kilosort = sic.load_extractor(sorting_file) + sglx_si_recording_filtered = si.core.load_extractor(recording_file) + sorting_kilosort = si.core.load_extractor(sorting_file) - we_kilosort = si.WaveformExtractor.create( + we_kilosort = si.full.WaveformExtractor.create( sglx_si_recording_filtered, sorting_kilosort, "waveforms", @@ -359,15 +349,15 @@ def make(self, key): unit_id0 = sorting_kilosort.unit_ids[0] waveforms = we_kilosort.get_waveforms(unit_id0) template = we_kilosort.get_template(unit_id0) - snrs = si.compute_snrs(we_kilosort) + snrs = si.full.compute_snrs(we_kilosort) # QC Metrics ( si_violations_ratio, isi_violations_rate, isi_violations_count, - ) = si.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) - metrics = 
si.compute_quality_metrics( + ) = si.full.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) + metrics = si.full.compute_quality_metrics( we_kilosort, metric_names=[ "firing_rate", @@ -382,7 +372,9 @@ def make(self, key): "drift", ], ) - sie.export_report(we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000) + si.exporters.export_report( + we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000 + ) # ["firing_rate","snr","presence_ratio","isi_violation", # "number_violation","amplitude_cutoff","isolation_distance","l_ratio","d_prime","nn_hit_rate", # "nn_miss_rate","silhouette_core","cumulative_drift","contamination_rate"]) @@ -392,10 +384,10 @@ def make(self, key): elif acq_software == "Open Ephys": sorting_file = kilosort_dir / "sorting_kilosort" recording_file = kilosort_dir / "sglx_recording_cmr.json" - oe_si_recording = sic.load_extractor(recording_file) - sorting_kilosort = sic.load_extractor(sorting_file) + oe_si_recording = si.core.load_extractor(recording_file) + sorting_kilosort = si.core.load_extractor(sorting_file) - we_kilosort = si.WaveformExtractor.create( + we_kilosort = si.full.WaveformExtractor.create( oe_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True ) we_kilosort.set_params(ms_before=3.0, ms_after=4.0, max_spikes_per_unit=500) @@ -403,23 +395,24 @@ def make(self, key): unit_id0 = sorting_kilosort.unit_ids[0] waveforms = we_kilosort.get_waveforms(unit_id0) template = we_kilosort.get_template(unit_id0) - snrs = si.compute_snrs(we_kilosort) + snrs = si.full.compute_snrs(we_kilosort) # QC Metrics # Apply waveform extractor extensions - _ = si.compute_spike_locations(we_kilosort) - _ = si.compute_spike_amplitudes(we_kilosort) - _ = si.compute_unit_locations(we_kilosort) - _ = si.compute_template_metrics(we_kilosort) - _ = si.compute_noise_levels(we_kilosort) - _ = si.compute_principal_components(we_kilosort) - _ = si.compute_drift_metrics(we_kilosort) - _ = si.compute_tempoate_similarity(we_kilosort) - 
(isi_violations_ratio, isi_violations_count) = si.compute_isi_violations( - we_kilosort, isi_threshold_ms=1.5 - ) - (isi_histograms, bins) = si.compute_isi_histograms(we_kilosort) - metrics = si.compute_quality_metrics( + _ = si.full.compute_spike_locations(we_kilosort) + _ = si.full.compute_spike_amplitudes(we_kilosort) + _ = si.full.compute_unit_locations(we_kilosort) + _ = si.full.compute_template_metrics(we_kilosort) + _ = si.full.compute_noise_levels(we_kilosort) + _ = si.full.compute_principal_components(we_kilosort) + _ = si.full.compute_drift_metrics(we_kilosort) + _ = si.full.compute_tempoate_similarity(we_kilosort) + ( + isi_violations_ratio, + isi_violations_count, + ) = si.full.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) + (isi_histograms, bins) = si.full.compute_isi_histograms(we_kilosort) + metrics = si.full.compute_quality_metrics( we_kilosort, metric_names=[ "firing_rate", @@ -434,7 +427,9 @@ def make(self, key): "drift", ], ) - sie.export_report(we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000) + si.exporters.export_report( + we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000 + ) we_savedir = kilosort_dir / "we_kilosort" we_kilosort.save(we_savedir, n_jobs=-1, chunk_size=30000) @@ -462,71 +457,73 @@ def make(self, key): # # If else # # need to figure out ordering # if preprocess_list["Filter"]: -# recording = sip.FilterRecording(recording) +# recording = si.preprocessing.FilterRecording(recording) # if preprocess_list["BandpassFilter"]: -# recording = sip.BandpassFilterRecording(recording) +# recording = si.preprocessing.BandpassFilterRecording(recording) # if preprocess_list["HighpassFilter"]: -# recording = sip.HighpassFilterRecording(recording) +# recording = si.preprocessing.HighpassFilterRecording(recording) # if preprocess_list["NormalizeByQuantile"]: -# recording = sip.NormalizeByQuantileRecording(recording) +# recording = si.preprocessing.NormalizeByQuantileRecording(recording) # if preprocess_list["Scale"]: -# 
recording = sip.ScaleRecording(recording) +# recording = si.preprocessing.ScaleRecording(recording) # if preprocess_list["Center"]: -# recording = sip.CenterRecording(recording) +# recording = si.preprocessing.CenterRecording(recording) # if preprocess_list["ZScore"]: -# recording = sip.ZScoreRecording(recording) +# recording = si.preprocessing.ZScoreRecording(recording) # if preprocess_list["Whiten"]: -# recording = sip.WhitenRecording(recording) +# recording = si.preprocessing.WhitenRecording(recording) # if preprocess_list["CommonReference"]: -# recording = sip.CommonReferenceRecording(recording) +# recording = si.preprocessing.CommonReferenceRecording(recording) # if preprocess_list["PhaseShift"]: -# recording = sip.PhaseShiftRecording(recording) +# recording = si.preprocessing.PhaseShiftRecording(recording) # elif preprocess_list["Rectify"]: -# recording = sip.RectifyRecording(recording) +# recording = si.preprocessing.RectifyRecording(recording) # elif preprocess_list["Clip"]: -# recording = sip.ClipRecording(recording) +# recording = si.preprocessing.ClipRecording(recording) # elif preprocess_list["BlankSaturation"]: -# recording = sip.BlankSaturationRecording(recording) +# recording = si.preprocessing.BlankSaturationRecording(recording) # elif preprocess_list["RemoveArtifacts"]: -# recording = sip.RemoveArtifactsRecording(recording) +# recording = si.preprocessing.RemoveArtifactsRecording(recording) # elif preprocess_list["RemoveBadChannels"]: -# recording = sip.RemoveBadChannelsRecording(recording) +# recording = si.preprocessing.RemoveBadChannelsRecording(recording) # elif preprocess_list["ZeroChannelPad"]: -# recording = sip.ZeroChannelPadRecording(recording) +# recording = si.preprocessing.ZeroChannelPadRecording(recording) # elif preprocess_list["DeepInterpolation"]: -# recording = sip.DeepInterpolationRecording(recording) +# recording = si.preprocessing.DeepInterpolationRecording(recording) # elif preprocess_list["Resample"]: -# recording = 
sip.ResampleRecording(recording) +# recording = si.preprocessing.ResampleRecording(recording) def mimic_IBLdestriping_modified(recording): # From SpikeInterface Implementation (https://spikeinterface.readthedocs.io/en/latest/how_to/analyse_neuropixels.html) - recording = si.highpass_filter(recording, freq_min=400.0) - bad_channel_ids, channel_labels = si.detect_bad_channels(recording) + recording = si.full.highpass_filter(recording, freq_min=400.0) + bad_channel_ids, channel_labels = si.full.detect_bad_channels(recording) # For IBL destriping interpolate bad channels recording = recording.remove_channels(bad_channel_ids) - recording = si.phase_shift(recording) - recording = si.common_reference(recording, operator="median", reference="global") + recording = si.full.phase_shift(recording) + recording = si.full.common_reference( + recording, operator="median", reference="global" + ) return recording def mimic_IBLdestriping(recording): # From International Brain Laboratory. “Spike sorting pipeline for the International Brain Laboratory”. 4 May 2022. 9 Jun 2022. 
- recording = si.highpass_filter(recording, freq_min=400.0) - bad_channel_ids, channel_labels = si.detect_bad_channels(recording) + recording = si.full.highpass_filter(recording, freq_min=400.0) + bad_channel_ids, channel_labels = si.full.detect_bad_channels(recording) # For IBL destriping interpolate bad channels - recording = sip.interpolate_bad_channels(bad_channel_ids) - recording = si.phase_shift(recording) + recording = si.preprocessing.interpolate_bad_channels(bad_channel_ids) + recording = si.full.phase_shift(recording) # For IBL destriping use highpass_spatial_filter used instead of common reference - recording = si.highpass_spatial_filter( + recording = si.full.highpass_spatial_filter( recording, operator="median", reference="global" ) return recording def mimic_catGT(sglx_recording): - sglx_recording = si.phase_shift(sglx_recording) - sglx_recording = si.common_reference( + sglx_recording = si.full.phase_shift(sglx_recording) + sglx_recording = si.full.common_reference( sglx_recording, operator="median", reference="global" ) return sglx_recording From 7836a8b4e6c600ba31b8af7efd0e17030e25b158 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 13 Oct 2023 16:47:09 -0500 Subject: [PATCH 044/204] modify key_source in PreProcessing --- element_array_ephys/spike_sorting/si_clustering.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 2cb5bf2e..e8b6517c 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -85,11 +85,11 @@ class PreProcessing(dj.Imported): @property def key_source(self): - return ( + return (( ephys.ClusteringTask * ephys.ClusteringParamSet & {"task_mode": "trigger"} & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' - ) - ephys.Clustering + ) - ephys.Clustering).proj() def make(self, key): """Triggers or imports clustering 
analysis.""" From 6bee166f5fe356ddea10e1a5b391e6daf6929ec9 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Thu, 14 Dec 2023 16:31:31 -0600 Subject: [PATCH 045/204] feat: :sparkles: improve to_probeinterface --- element_array_ephys/readers/probe_geometry.py | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/element_array_ephys/readers/probe_geometry.py b/element_array_ephys/readers/probe_geometry.py index 11e3ae99..7247abe9 100644 --- a/element_array_ephys/readers/probe_geometry.py +++ b/element_array_ephys/readers/probe_geometry.py @@ -132,8 +132,8 @@ def build_npx_probe( return elec_pos_df -def to_probeinterface(electrodes_df): - from probeinterface import Probe +def to_probeinterface(electrodes_df, **kwargs): + import probeinterface as pi probe_df = electrodes_df.copy() probe_df.rename( @@ -145,10 +145,22 @@ def to_probeinterface(electrodes_df): }, inplace=True, ) - probe_df["contact_shapes"] = "square" - probe_df["width"] = 12 - - return Probe.from_dataframe(probe_df) + # Get the contact shapes. By default, it's set to circle with a radius of 10. 
+ contact_shapes = kwargs.get("contact_shapes", "circle") + assert ( + contact_shapes in pi.probe._possible_contact_shapes + ), f"contacts shape should be in {pi.probe._possible_contact_shapes}" + + probe_df["contact_shapes"] = contact_shapes + if contact_shapes == "circle": + probe_df["radius"] = kwargs.get("radius", 10) + elif contact_shapes == "square": + probe_df["width"] = kwargs.get("width", 10) + elif contact_shapes == "rect": + probe_df["width"] = kwargs.get("width") + probe_df["height"] = kwargs.get("height") + + return pi.Probe.from_dataframe(probe_df) def build_electrode_layouts( From 9ae6b4492583429db9931eda93c37d5092b05e8e Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 15 Dec 2023 17:10:07 -0600 Subject: [PATCH 046/204] create preprocessing.py --- .../spike_sorting/preprocessing.py | 85 +++++++++++++++++++ .../spike_sorting/si_clustering.py | 11 ++- 2 files changed, 95 insertions(+), 1 deletion(-) create mode 100644 element_array_ephys/spike_sorting/preprocessing.py diff --git a/element_array_ephys/spike_sorting/preprocessing.py b/element_array_ephys/spike_sorting/preprocessing.py new file mode 100644 index 00000000..77a95792 --- /dev/null +++ b/element_array_ephys/spike_sorting/preprocessing.py @@ -0,0 +1,85 @@ +import spikeinterface as si +from spikeinterface import preprocessing + + +def mimic_catGT(recording): + recording = si.preprocessing.phase_shift(recording) + recording = si.preprocessing.common_reference( + recording, operator="median", reference="global" + ) + return recording + + +def mimic_IBLdestriping(recording): + # From International Brain Laboratory. “Spike sorting pipeline for the International Brain Laboratory”. 4 May 2022. 9 Jun 2022. 
+ recording = si.preprocessing.highpass_filter(recording, freq_min=400.0) + bad_channel_ids, channel_labels = si.preprocessing.detect_bad_channels(recording) + # For IBL destriping interpolate bad channels + recording = si.preprocessing.interpolate_bad_channels(bad_channel_ids) + recording = si.preprocessing.phase_shift(recording) + # For IBL destriping use highpass_spatial_filter used instead of common reference + recording = si.preprocessing.highpass_spatial_filter( + recording, operator="median", reference="global" + ) + return recording + + +def mimic_IBLdestriping_modified(recording): + # From SpikeInterface Implementation (https://spikeinterface.readthedocs.io/en/latest/how_to/analyse_neuropixels.html) + recording = si.preprocessing.highpass_filter(recording, freq_min=400.0) + bad_channel_ids, channel_labels = si.preprocessing.detect_bad_channels(recording) + # For IBL destriping interpolate bad channels + recording = recording.remove_channels(bad_channel_ids) + recording = si.preprocessing.phase_shift(recording) + recording = si.preprocessing.common_reference( + recording, operator="median", reference="global" + ) + return recording + + +_preprocessing_function = { + "catGT": mimic_catGT, + "IBLdestriping": mimic_IBLdestriping, + "IBLdestriping_modified": mimic_IBLdestriping_modified, +} + + +## Example SI parameter set +""" +{'detect_threshold': 6, + 'projection_threshold': [10, 4], + 'preclust_threshold': 8, + 'car': True, + 'minFR': 0.02, + 'minfr_goodchannels': 0.1, + 'nblocks': 5, + 'sig': 20, + 'freq_min': 150, + 'sigmaMask': 30, + 'nPCs': 3, + 'ntbuff': 64, + 'nfilt_factor': 4, + 'NT': None, + 'do_correction': True, + 'wave_length': 61, + 'keep_good_only': False, + 'PreProcessing_params': {'Filter': False, + 'BandpassFilter': True, + 'HighpassFilter': False, + 'NotchFilter': False, + 'NormalizeByQuantile': False, + 'Scale': False, + 'Center': False, + 'ZScore': False, + 'Whiten': False, + 'CommonReference': False, + 'PhaseShift': False, + 'Rectify': 
False, + 'Clip': False, + 'BlankSaturation': False, + 'RemoveArtifacts': False, + 'RemoveBadChannels': False, + 'ZeroChannelPad': False, + 'DeepInterpolation': False, + 'Resample': False}} +""" diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index e8b6517c..a8d5d8c0 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -31,7 +31,16 @@ import element_array_ephys.probe as probe import spikeinterface as si -import probeinterface as pi +from element_interface.utils import find_full_path, find_root_directory +from spikeinterface import sorters + +from element_array_ephys import get_logger, probe, readers + +from .preprocessing import ( + mimic_catGT, + mimic_IBLdestriping, + mimic_IBLdestriping_modified, +) log = get_logger(__name__) From 9d5eee66aea3d9b967bd22beeefd2651b93a3b6b Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 15 Dec 2023 17:12:19 -0600 Subject: [PATCH 047/204] add SI_SORTERS , SI_READERS --- .../spike_sorting/si_clustering.py | 27 ++++++++----------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index a8d5d8c0..b9e9cb2e 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -18,18 +18,10 @@ - quality_metrics """ -import datajoint as dj -import os -from element_array_ephys import get_logger from datetime import datetime -from element_interface.utils import find_full_path -from element_array_ephys.readers import ( - spikeglx, - kilosort_triggering, -) -import element_array_ephys.probe as probe - +import datajoint as dj +import probeinterface as pi import spikeinterface as si from element_interface.utils import find_full_path, find_root_directory from spikeinterface import sorters @@ -48,12 +40,6 @@ ephys = None 
-_supported_kilosort_versions = [ - "kilosort2", - "kilosort2.5", - "kilosort3", -] - def activate( schema_name, @@ -79,6 +65,15 @@ def activate( ) +SI_SORTERS = [s.replace(".", "_") for s in si.sorters.sorter_dict.keys()] + +SI_READERS = { + "Open Ephys": si.extractors.read_openephys, + "SpikeGLX": si.extractors.read_spikeglx, + "Intan": si.extractors.read_intan, +} + + @schema class PreProcessing(dj.Imported): """A table to handle preprocessing of each clustering task.""" From 1fceb90a1816613a0e86f2f7288ba56ba254de40 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 15 Dec 2023 17:15:45 -0600 Subject: [PATCH 048/204] feat: :art: si_clustering.PreProcessing --- .../spike_sorting/si_clustering.py | 149 +++++------------- 1 file changed, 37 insertions(+), 112 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index b9e9cb2e..5510436f 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -76,34 +76,31 @@ def activate( @schema class PreProcessing(dj.Imported): - """A table to handle preprocessing of each clustering task.""" + """A table to handle preprocessing of each clustering task. 
The output will be serialized and stored as a si_recording.pkl in the output directory.""" definition = """ -> ephys.ClusteringTask --- - recording_filename: varchar(30) # filename where recording object is saved to - params: longblob # finalized parameterset for this run execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration + execution_duration: float # execution duration in hours """ @property def key_source(self): - return (( + return ( ephys.ClusteringTask * ephys.ClusteringParamSet & {"task_mode": "trigger"} - & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' - ) - ephys.Clustering).proj() + & f"clustering_method in {tuple(SI_SORTERS)}" + ) - ephys.Clustering def make(self, key): """Triggers or imports clustering analysis.""" execution_time = datetime.utcnow() - task_mode, output_dir = (ephys.ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - - assert task_mode == "trigger", 'Supporting "trigger" task_mode only' + # Set the output directory + acq_software, output_dir = ( + ephys.ClusteringTask * ephys.EphysRecording & key + ).fetch1("acq_software", "clustering_output_dir") if not output_dir: output_dir = ephys.ClusteringTask.infer_output_dir( @@ -113,115 +110,43 @@ def make(self, key): ephys.ClusteringTask.update1( {**key, "clustering_output_dir": output_dir.as_posix()} ) + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method, params = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method", "params") - - assert ( - clustering_method in _supported_kilosort_versions - ), f'Clustering_method "{clustering_method}" is not supported' - - if clustering_method.startswith("kilosort2.5"): - sorter_name = "kilosort2_5" - else: - sorter_name = clustering_method - # 
add additional probe-recording and channels details into `params` - # params = {**params, **ephys.get_recording_channels_details(key)} - # params["fs"] = params["sample_rate"] - - # default_params = si.full.get_default_sorter_params(sorter_name) - # preprocess_list = params.pop("PreProcessing_params") - - if acq_software == "SpikeGLX": - # sglx_session_full_path = find_full_path(ephys.get_ephys_root_data_dir(),ephys.get_session_directory(key)) - sglx_filepath = ephys.get_spikeglx_meta_filepath(key) - - # Create SI recording extractor object - stream_name = sglx_filepath.stem.split(".", 1)[1] - sglx_si_recording = si.extractors.read_spikeglx( - folder_path=sglx_filepath.parent, - stream_name=stream_name, - stream_id=stream_name, - ) - - channels_details = ephys.get_recording_channels_details(key) - xy_coords = [ - list(i) - for i in zip(channels_details["x_coords"], channels_details["y_coords"]) - ] - - # Create SI probe object - si_probe = pi.Probe(ndim=2, si_units="um") - si_probe.set_contacts( - positions=xy_coords, shapes="square", shape_params={"width": 12} - ) - si_probe.create_auto_shape(probe_type="tip") - si_probe.set_device_channel_indices(channels_details["channel_ind"]) - sglx_si_recording.set_probe(probe=si_probe) - - # # run preprocessing and save results to output folder - # sglx_si_recording_filtered = si.preprocessing.bandpass_filter( - # sglx_si_recording, freq_min=300, freq_max=6000 - # ) - # sglx_recording_cmr = si.preprocessing.common_reference(sglx_si_recording_filtered, reference="global", operator="median") - sglx_si_recording = mimic_catGT(sglx_si_recording) - save_file_name = "si_recording.pkl" - save_file_path = kilosort_dir / save_file_name - sglx_si_recording.dump_to_pickle(file_path=save_file_path) - - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - oe_session_full_path = find_full_path( - ephys.get_ephys_root_data_dir(), ephys.get_session_directory(key) - ) + # Create SI recording extractor object 
+ si_recording: si.BaseRecording = SI_READERS[acq_software]( + folder_path=output_dir + ) - assert len(oe_probe.recording_info["recording_files"]) == 1 - stream_name = os.path.split(oe_probe.recording_info["recording_files"][0])[ - 1 - ] - - # Create SI recording extractor object - # oe_si_recording = si.extractors.OpenEphysBinaryRecordingExtractor(folder_path=oe_full_path, stream_name=stream_name) - oe_si_recording = si.extractors.read_openephys( - folder_path=oe_session_full_path, stream_name=stream_name + # Add probe information to recording object + electrode_config_key = ( + probe.ElectrodeConfig * ephys.EphysRecording & key + ).fetch1("KEY") + electrodes_df = ( + ( + probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode + & electrode_config_key ) + .fetch(format="frame") + .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] + ) - channels_details = ephys.get_recording_channels_details(key) - xy_coords = [ - list(i) - for i in zip(channels_details["x_coords"], channels_details["y_coords"]) - ] + # Create SI probe object + si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) + si_recording.set_probe(probe=si_probe, in_place=True) - # Create SI probe object - si_probe = pi.Probe(ndim=2, si_units="um") - si_probe.set_contacts( - positions=xy_coords, shapes="square", shape_params={"width": 12} - ) - si_probe.create_auto_shape(probe_type="tip") - si_probe.set_device_channel_indices(channels_details["channel_ind"]) - oe_si_recording.set_probe(probe=si_probe) - - # run preprocessing and save results to output folder - # # Switch case to allow for specified preprocessing steps - # oe_si_recording_filtered = si.preprocessing.bandpass_filter( - # oe_si_recording, freq_min=300, freq_max=6000 - # ) - # oe_recording_cmr = si.preprocessing.common_reference( - # oe_si_recording_filtered, reference="global", operator="median" - # ) - oe_si_recording = mimic_IBLdestriping(oe_si_recording) - save_file_name = "si_recording.pkl" - save_file_path = 
kilosort_dir / save_file_name - oe_si_recording.dump_to_pickle(file_path=save_file_path) + # Run preprocessing and save results to output folder + preprocessing_method = "catGT" # where to load this info? + si_recording = { + "catGT": mimic_catGT, + "IBLdestriping": mimic_IBLdestriping, + "IBLdestriping_modified": mimic_IBLdestriping_modified, + }[preprocessing_method](si_recording) + recording_file_name = output_dir / "si_recording.pkl" + si_recording.dump_to_pickle(file_path=recording_file_name) self.insert1( { **key, - "recording_filename": save_file_name, - "params": params, "execution_time": execution_time, "execution_duration": ( datetime.utcnow() - execution_time From df8ed7464ebc3452148c0daa1f1e43987d7035ec Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 19 Dec 2023 21:53:43 -0600 Subject: [PATCH 049/204] feat: :art: si_clustering.SIClustering --- .../spike_sorting/si_clustering.py | 93 +++++++------------ 1 file changed, 35 insertions(+), 58 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 5510436f..debc4336 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -21,10 +21,11 @@ from datetime import datetime import datajoint as dj +import pandas as pd import probeinterface as pi import spikeinterface as si from element_interface.utils import find_full_path, find_root_directory -from spikeinterface import sorters +from spikeinterface import exporters, qualitymetrics, sorters from element_array_ephys import get_logger, probe, readers @@ -65,7 +66,7 @@ def activate( ) -SI_SORTERS = [s.replace(".", "_") for s in si.sorters.sorter_dict.keys()] +SI_SORTERS = [s.replace("_", ".") for s in si.sorters.sorter_dict.keys()] SI_READERS = { "Open Ephys": si.extractors.read_openephys, @@ -141,8 +142,8 @@ def make(self, key): "IBLdestriping": mimic_IBLdestriping, "IBLdestriping_modified": 
mimic_IBLdestriping_modified, }[preprocessing_method](si_recording) - recording_file_name = output_dir / "si_recording.pkl" - si_recording.dump_to_pickle(file_path=recording_file_name) + recording_file = output_dir / "si_recording.pkl" + si_recording.dump_to_pickle(file_path=recording_file) self.insert1( { @@ -162,72 +163,48 @@ class SIClustering(dj.Imported): definition = """ -> PreProcessing + sorter_name: varchar(30) # name of the sorter used --- - sorting_filename: varchar(30) # filename of saved sorting object - execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration + execution_time: datetime # datetime of the start of this step + execution_duration: float # execution duration in hours """ def make(self, key): execution_time = datetime.utcnow() + # Load recording object. output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method") - - params = (PreProcessing & key).fetch1("params") - recording_filename = (PreProcessing & key).fetch1("recording_filename") + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + recording_file = output_dir / "si_recording.pkl" + si_recording: si.BaseRecording = si.load_extractor(recording_file) + + # Get sorter method and create output directory. 
+ clustering_method, params = ( + ephys.ClusteringTask * ephys.ClusteringParamSet & key + ).fetch1("clustering_method", "params") + sorter_name = ( + "kilosort_2_5" if clustering_method == "kilsort2.5" else clustering_method + ) + sorter_dir = output_dir / sorter_name + + # Run sorting + si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( + sorter_name=sorter_name, + recording=si_recording, + output_folder=sorter_dir, + verbse=True, + docker_image=True, + **params, + ) - if acq_software == "SpikeGLX": - # sglx_probe = ephys.get_openephys_probe_data(key) - recording_fullpath = kilosort_dir / recording_filename - # sglx_si_recording = si.extractors.load_from_folder(recording_file) - sglx_si_recording = si.core.load_extractor(recording_fullpath) - # assert len(oe_probe.recording_info["recording_files"]) == 1 - - ## Assume that the worker process will trigger this sorting step - # - Will need to store/load the sorter_name, sglx_si_recording object etc. - # - Store in shared EC2 space accessible by all containers (needs to be mounted) - # - Load into the cloud init script, and - # - Option A: Can call this function within a separate container within spike_sorting_worker - if clustering_method.startswith("kilosort2.5"): - sorter_name = "kilosort2_5" - else: - sorter_name = clustering_method - sorting_kilosort = si.full.run_sorter( - sorter_name=sorter_name, - recording=sglx_si_recording, - output_folder=kilosort_dir, - docker_image=f"spikeinterface/{sorter_name}-compiled-base:latest", - **params, - ) - sorting_save_path = kilosort_dir / "sorting_kilosort.pkl" - sorting_kilosort.dump_to_pickle(sorting_save_path) - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - oe_si_recording = si.core.load_extractor(recording_fullpath) - assert len(oe_probe.recording_info["recording_files"]) == 1 - if clustering_method.startswith("kilosort2.5"): - sorter_name = "kilosort2_5" - else: - sorter_name = clustering_method - sorting_kilosort = 
si.full.run_sorter( - sorter_name=sorter_name, - recording=oe_si_recording, - output_folder=kilosort_dir, - docker_image=f"spikeinterface/{sorter_name}-compiled-base:latest", - **params, - ) - sorting_save_path = kilosort_dir / "sorting_kilosort.pkl" - sorting_kilosort.dump_to_pickle(sorting_save_path) + # Run sorting + sorting_save_path = sorter_dir / "si_sorting.pkl" + si_sorting.dump_to_pickle(sorting_save_path) self.insert1( { **key, - "sorting_filename": list(sorting_save_path.parts)[-1], + "sorter_name": sorter_name, "execution_time": execution_time, "execution_duration": ( datetime.utcnow() - execution_time From 2ed337bf0fa8245a7f8d481dc779b072cfcbadd0 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 20 Dec 2023 15:57:55 -0600 Subject: [PATCH 050/204] feat: :sparkles: add PostProcessing table & clean up --- .../spike_sorting/si_clustering.py | 275 +++--------------- 1 file changed, 45 insertions(+), 230 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index debc4336..935d7360 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -25,7 +25,7 @@ import probeinterface as pi import spikeinterface as si from element_interface.utils import find_full_path, find_root_directory -from spikeinterface import exporters, qualitymetrics, sorters +from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from element_array_ephys import get_logger, probe, readers @@ -222,126 +222,58 @@ class PostProcessing(dj.Imported): -> SIClustering --- execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration + execution_duration: float # execution duration in hours """ def make(self, key): execution_time = datetime.utcnow() + JOB_KWARGS = dict(n_jobs=-1, chunk_size=30000) + # Load sorting & recording object. 
output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method") - - params = (PreProcessing & key).fetch1("params") - - if acq_software == "SpikeGLX": - recording_filename = (PreProcessing & key).fetch1("recording_filename") - sorting_file = kilosort_dir / "sorting_kilosort" - filtered_recording_file = kilosort_dir / recording_filename - sglx_si_recording_filtered = si.core.load_extractor(recording_file) - sorting_kilosort = si.core.load_extractor(sorting_file) - - we_kilosort = si.full.WaveformExtractor.create( - sglx_si_recording_filtered, - sorting_kilosort, - "waveforms", - remove_if_exists=True, - ) - we_kilosort.set_params(ms_before=3.0, ms_after=4.0, max_spikes_per_unit=500) - we_kilosort.run_extract_waveforms(n_jobs=-1, chunk_size=30000) - unit_id0 = sorting_kilosort.unit_ids[0] - waveforms = we_kilosort.get_waveforms(unit_id0) - template = we_kilosort.get_template(unit_id0) - snrs = si.full.compute_snrs(we_kilosort) - - # QC Metrics - ( - si_violations_ratio, - isi_violations_rate, - isi_violations_count, - ) = si.full.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) - metrics = si.full.compute_quality_metrics( - we_kilosort, - metric_names=[ - "firing_rate", - "snr", - "presence_ratio", - "isi_violation", - "num_spikes", - "amplitude_cutoff", - "amplitude_median", - # "sliding_rp_violation", - "rp_violation", - "drift", - ], - ) - si.exporters.export_report( - we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000 - ) - # ["firing_rate","snr","presence_ratio","isi_violation", - # "number_violation","amplitude_cutoff","isolation_distance","l_ratio","d_prime","nn_hit_rate", - # "nn_miss_rate","silhouette_core","cumulative_drift","contamination_rate"]) - we_savedir = kilosort_dir / "we_kilosort" - 
we_kilosort.save(we_savedir, n_jobs=-1, chunk_size=30000) - - elif acq_software == "Open Ephys": - sorting_file = kilosort_dir / "sorting_kilosort" - recording_file = kilosort_dir / "sglx_recording_cmr.json" - oe_si_recording = si.core.load_extractor(recording_file) - sorting_kilosort = si.core.load_extractor(sorting_file) - - we_kilosort = si.full.WaveformExtractor.create( - oe_si_recording, sorting_kilosort, "waveforms", remove_if_exists=True - ) - we_kilosort.set_params(ms_before=3.0, ms_after=4.0, max_spikes_per_unit=500) - we_kilosort.run_extract_waveforms(n_jobs=-1, chunk_size=30000) - unit_id0 = sorting_kilosort.unit_ids[0] - waveforms = we_kilosort.get_waveforms(unit_id0) - template = we_kilosort.get_template(unit_id0) - snrs = si.full.compute_snrs(we_kilosort) - - # QC Metrics - # Apply waveform extractor extensions - _ = si.full.compute_spike_locations(we_kilosort) - _ = si.full.compute_spike_amplitudes(we_kilosort) - _ = si.full.compute_unit_locations(we_kilosort) - _ = si.full.compute_template_metrics(we_kilosort) - _ = si.full.compute_noise_levels(we_kilosort) - _ = si.full.compute_principal_components(we_kilosort) - _ = si.full.compute_drift_metrics(we_kilosort) - _ = si.full.compute_tempoate_similarity(we_kilosort) - ( - isi_violations_ratio, - isi_violations_count, - ) = si.full.compute_isi_violations(we_kilosort, isi_threshold_ms=1.5) - (isi_histograms, bins) = si.full.compute_isi_histograms(we_kilosort) - metrics = si.full.compute_quality_metrics( - we_kilosort, - metric_names=[ - "firing_rate", - "snr", - "presence_ratio", - "isi_violation", - "num_spikes", - "amplitude_cutoff", - "amplitude_median", - # "sliding_rp_violation", - "rp_violation", - "drift", - ], - ) - si.exporters.export_report( - we_kilosort, kilosort_dir, n_jobs=-1, chunk_size=30000 - ) - we_savedir = kilosort_dir / "we_kilosort" - we_kilosort.save(we_savedir, n_jobs=-1, chunk_size=30000) + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + recording_file 
= output_dir / "si_recording.pkl" + sorter_dir = output_dir / key["sorter_name"] + sorting_file = sorter_dir / "si_sorting.pkl" + + si_recording: si.BaseRecording = si.load_extractor(recording_file) + si_sorting: si.sorters.BaseSorter = si.load_extractor(sorting_file) + + # Extract waveforms + we: si.WaveformExtractor = si.extract_waveforms( + si_recording, + si_sorting, + folder=sorter_dir / "waveform", # The folder where waveforms are cached + ms_before=3.0, + ms_after=4.0, + max_spikes_per_unit=500, + overwrite=True, + **JOB_KWARGS, + ) - metrics_savefile = kilosort_dir / "metrics.csv" - metrics.to_csv(metrics_savefile) + # Calculate QC Metrics + metrics: pd.DataFrame = si.qualitymetrics.compute_quality_metrics( + we, + metric_names=[ + "firing_rate", + "snr", + "presence_ratio", + "isi_violation", + "num_spikes", + "amplitude_cutoff", + "amplitude_median", + "sliding_rp_violation", + "rp_violation", + "drift", + ], + ) + # Add PCA based metrics. These will be added to the metrics dataframe above. 
+ _ = si.postprocessing.compute_principal_components( + waveform_extractor=we, n_components=5, mode="by_channel_local" + ) # TODO: the parameters need to be checked + metrics = si.qualitymetrics.compute_quality_metrics(waveform_extractor=we) + # Save results self.insert1( { **key, @@ -357,120 +289,3 @@ def make(self, key): ephys.Clustering.insert1( {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True ) - - -# def runPreProcessList(preprocess_list, recording): -# # If else -# # need to figure out ordering -# if preprocess_list["Filter"]: -# recording = si.preprocessing.FilterRecording(recording) -# if preprocess_list["BandpassFilter"]: -# recording = si.preprocessing.BandpassFilterRecording(recording) -# if preprocess_list["HighpassFilter"]: -# recording = si.preprocessing.HighpassFilterRecording(recording) -# if preprocess_list["NormalizeByQuantile"]: -# recording = si.preprocessing.NormalizeByQuantileRecording(recording) -# if preprocess_list["Scale"]: -# recording = si.preprocessing.ScaleRecording(recording) -# if preprocess_list["Center"]: -# recording = si.preprocessing.CenterRecording(recording) -# if preprocess_list["ZScore"]: -# recording = si.preprocessing.ZScoreRecording(recording) -# if preprocess_list["Whiten"]: -# recording = si.preprocessing.WhitenRecording(recording) -# if preprocess_list["CommonReference"]: -# recording = si.preprocessing.CommonReferenceRecording(recording) -# if preprocess_list["PhaseShift"]: -# recording = si.preprocessing.PhaseShiftRecording(recording) -# elif preprocess_list["Rectify"]: -# recording = si.preprocessing.RectifyRecording(recording) -# elif preprocess_list["Clip"]: -# recording = si.preprocessing.ClipRecording(recording) -# elif preprocess_list["BlankSaturation"]: -# recording = si.preprocessing.BlankSaturationRecording(recording) -# elif preprocess_list["RemoveArtifacts"]: -# recording = si.preprocessing.RemoveArtifactsRecording(recording) -# elif preprocess_list["RemoveBadChannels"]: -# recording 
= si.preprocessing.RemoveBadChannelsRecording(recording) -# elif preprocess_list["ZeroChannelPad"]: -# recording = si.preprocessing.ZeroChannelPadRecording(recording) -# elif preprocess_list["DeepInterpolation"]: -# recording = si.preprocessing.DeepInterpolationRecording(recording) -# elif preprocess_list["Resample"]: -# recording = si.preprocessing.ResampleRecording(recording) - - -def mimic_IBLdestriping_modified(recording): - # From SpikeInterface Implementation (https://spikeinterface.readthedocs.io/en/latest/how_to/analyse_neuropixels.html) - recording = si.full.highpass_filter(recording, freq_min=400.0) - bad_channel_ids, channel_labels = si.full.detect_bad_channels(recording) - # For IBL destriping interpolate bad channels - recording = recording.remove_channels(bad_channel_ids) - recording = si.full.phase_shift(recording) - recording = si.full.common_reference( - recording, operator="median", reference="global" - ) - return recording - - -def mimic_IBLdestriping(recording): - # From International Brain Laboratory. “Spike sorting pipeline for the International Brain Laboratory”. 4 May 2022. 9 Jun 2022. 
- recording = si.full.highpass_filter(recording, freq_min=400.0) - bad_channel_ids, channel_labels = si.full.detect_bad_channels(recording) - # For IBL destriping interpolate bad channels - recording = si.preprocessing.interpolate_bad_channels(bad_channel_ids) - recording = si.full.phase_shift(recording) - # For IBL destriping use highpass_spatial_filter used instead of common reference - recording = si.full.highpass_spatial_filter( - recording, operator="median", reference="global" - ) - return recording - - -def mimic_catGT(sglx_recording): - sglx_recording = si.full.phase_shift(sglx_recording) - sglx_recording = si.full.common_reference( - sglx_recording, operator="median", reference="global" - ) - return sglx_recording - - -## Example SI parameter set -""" -{'detect_threshold': 6, - 'projection_threshold': [10, 4], - 'preclust_threshold': 8, - 'car': True, - 'minFR': 0.02, - 'minfr_goodchannels': 0.1, - 'nblocks': 5, - 'sig': 20, - 'freq_min': 150, - 'sigmaMask': 30, - 'nPCs': 3, - 'ntbuff': 64, - 'nfilt_factor': 4, - 'NT': None, - 'do_correction': True, - 'wave_length': 61, - 'keep_good_only': False, - 'PreProcessing_params': {'Filter': False, - 'BandpassFilter': True, - 'HighpassFilter': False, - 'NotchFilter': False, - 'NormalizeByQuantile': False, - 'Scale': False, - 'Center': False, - 'ZScore': False, - 'Whiten': False, - 'CommonReference': False, - 'PhaseShift': False, - 'Rectify': False, - 'Clip': False, - 'BlankSaturation': False, - 'RemoveArtifacts': False, - 'RemoveBadChannels': False, - 'ZeroChannelPad': False, - 'DeepInterpolation': False, - 'Resample': False}} -""" From f6e3e4624255b9b42e89de3e520c8696bc60089f Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 2 Jan 2024 18:04:25 -0600 Subject: [PATCH 051/204] fix: :bug: fix input/output data directory --- .../spike_sorting/si_clustering.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py 
b/element_array_ephys/spike_sorting/si_clustering.py index 935d7360..80449c88 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -18,6 +18,7 @@ - quality_metrics """ +import pathlib from datetime import datetime import datajoint as dj @@ -111,11 +112,19 @@ def make(self, key): ephys.ClusteringTask.update1( {**key, "clustering_output_dir": output_dir.as_posix()} ) - output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + output_full_dir = find_full_path( + ephys.get_ephys_root_data_dir(), output_dir + ) # output directory in the processed data directory # Create SI recording extractor object + data_dir = ( + ephys.get_ephys_root_data_dir()[0] / pathlib.Path(output_dir).parent + ) # raw data directory + stream_names, stream_ids = si.extractors.get_neo_streams( + acq_software.strip().lower(), folder_path=data_dir + ) si_recording: si.BaseRecording = SI_READERS[acq_software]( - folder_path=output_dir + folder_path=data_dir, stream_name=stream_names[0] ) # Add probe information to recording object @@ -142,7 +151,7 @@ def make(self, key): "IBLdestriping": mimic_IBLdestriping, "IBLdestriping_modified": mimic_IBLdestriping_modified, }[preprocessing_method](si_recording) - recording_file = output_dir / "si_recording.pkl" + recording_file = output_full_dir / "si_recording.pkl" si_recording.dump_to_pickle(file_path=recording_file) self.insert1( From e1c0d689d6b7958c231389e8d11b7ef2e326657f Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 3 Jan 2024 11:47:03 -0600 Subject: [PATCH 052/204] check for presence of recording file --- .../spike_sorting/si_clustering.py | 107 ++++++++++-------- 1 file changed, 57 insertions(+), 50 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 80449c88..8a5adffb 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ 
b/element_array_ephys/spike_sorting/si_clustering.py @@ -25,7 +25,7 @@ import pandas as pd import probeinterface as pi import spikeinterface as si -from element_interface.utils import find_full_path, find_root_directory +from element_interface.utils import find_full_path from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from element_array_ephys import get_logger, probe, readers @@ -112,58 +112,65 @@ def make(self, key): ephys.ClusteringTask.update1( {**key, "clustering_output_dir": output_dir.as_posix()} ) + output_dir = pathlib.Path(output_dir) output_full_dir = find_full_path( - ephys.get_ephys_root_data_dir(), output_dir - ) # output directory in the processed data directory - - # Create SI recording extractor object - data_dir = ( - ephys.get_ephys_root_data_dir()[0] / pathlib.Path(output_dir).parent - ) # raw data directory - stream_names, stream_ids = si.extractors.get_neo_streams( - acq_software.strip().lower(), folder_path=data_dir - ) - si_recording: si.BaseRecording = SI_READERS[acq_software]( - folder_path=data_dir, stream_name=stream_names[0] - ) - - # Add probe information to recording object - electrode_config_key = ( - probe.ElectrodeConfig * ephys.EphysRecording & key - ).fetch1("KEY") - electrodes_df = ( - ( - probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode - & electrode_config_key + ephys.get_ephys_root_data_dir(), output_dir.parent + ) # recording object will be stored in the parent dir since it can be re-used for multiple sorters + + recording_file = ( + output_full_dir / "si_recording.pkl" + ) # recording cache to be created for each key + + if not recording_file.exists(): # skip if si_recording.pkl already exists + # Create SI recording extractor object + data_dir = ( + ephys.get_ephys_root_data_dir()[0] / output_dir.parent + ) # raw data directory + stream_names, stream_ids = si.extractors.get_neo_streams( + acq_software.strip().lower(), folder_path=data_dir + ) + si_recording: si.BaseRecording = 
SI_READERS[acq_software]( + folder_path=data_dir, stream_name=stream_names[0] ) - .fetch(format="frame") - .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] - ) - - # Create SI probe object - si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) - si_recording.set_probe(probe=si_probe, in_place=True) - - # Run preprocessing and save results to output folder - preprocessing_method = "catGT" # where to load this info? - si_recording = { - "catGT": mimic_catGT, - "IBLdestriping": mimic_IBLdestriping, - "IBLdestriping_modified": mimic_IBLdestriping_modified, - }[preprocessing_method](si_recording) - recording_file = output_full_dir / "si_recording.pkl" - si_recording.dump_to_pickle(file_path=recording_file) - self.insert1( - { - **key, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } - ) + # Add probe information to recording object + electrode_config_key = ( + probe.ElectrodeConfig * ephys.EphysRecording & key + ).fetch1("KEY") + electrodes_df = ( + ( + probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode + & electrode_config_key + ) + .fetch(format="frame") + .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] + ) + channels_details = ephys.get_recording_channels_details(key) + + # Create SI probe object + si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) + si_probe.set_device_channel_indices(channels_details["channel_ind"]) + si_recording.set_probe(probe=si_probe, in_place=True) + + # Run preprocessing and save results to output folder + preprocessing_method = "catGT" # where to load this info? 
+ si_recording = { + "catGT": mimic_catGT, + "IBLdestriping": mimic_IBLdestriping, + "IBLdestriping_modified": mimic_IBLdestriping_modified, + }[preprocessing_method](si_recording) + si_recording.dump_to_pickle(file_path=recording_file) + + self.insert1( + { + **key, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) @schema From 653e7e84bcacd3cf7ae382e5236b732db500ad06 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 3 Jan 2024 15:05:53 -0600 Subject: [PATCH 053/204] fix: :bug: fix path & typo --- element_array_ephys/spike_sorting/si_clustering.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_clustering.py index 8a5adffb..32804645 100644 --- a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_clustering.py @@ -190,8 +190,8 @@ def make(self, key): # Load recording object. output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - recording_file = output_dir / "si_recording.pkl" + output_full_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + recording_file = output_full_dir.parent / "si_recording.pkl" si_recording: si.BaseRecording = si.load_extractor(recording_file) # Get sorter method and create output directory. 
@@ -199,9 +199,9 @@ def make(self, key): ephys.ClusteringTask * ephys.ClusteringParamSet & key ).fetch1("clustering_method", "params") sorter_name = ( - "kilosort_2_5" if clustering_method == "kilsort2.5" else clustering_method + "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method ) - sorter_dir = output_dir / sorter_name + sorter_dir = output_full_dir / sorter_name # Run sorting si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( From 8c25bd21f69bb5151508729f559f7515b8fc3d08 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 5 Jan 2024 12:56:42 -0600 Subject: [PATCH 054/204] code review --- .../{preprocessing.py => si_preprocessing.py} | 2 +- .../{si_clustering.py => si_spike_sorting.py} | 131 +++++++++--------- 2 files changed, 70 insertions(+), 63 deletions(-) rename element_array_ephys/spike_sorting/{preprocessing.py => si_preprocessing.py} (98%) rename element_array_ephys/spike_sorting/{si_clustering.py => si_spike_sorting.py} (72%) diff --git a/element_array_ephys/spike_sorting/preprocessing.py b/element_array_ephys/spike_sorting/si_preprocessing.py similarity index 98% rename from element_array_ephys/spike_sorting/preprocessing.py rename to element_array_ephys/spike_sorting/si_preprocessing.py index 77a95792..2edf443d 100644 --- a/element_array_ephys/spike_sorting/preprocessing.py +++ b/element_array_ephys/spike_sorting/si_preprocessing.py @@ -37,7 +37,7 @@ def mimic_IBLdestriping_modified(recording): return recording -_preprocessing_function = { +preprocessing_function_mapping = { "catGT": mimic_catGT, "IBLdestriping": mimic_IBLdestriping, "IBLdestriping_modified": mimic_IBLdestriping_modified, diff --git a/element_array_ephys/spike_sorting/si_clustering.py b/element_array_ephys/spike_sorting/si_spike_sorting.py similarity index 72% rename from element_array_ephys/spike_sorting/si_clustering.py rename to element_array_ephys/spike_sorting/si_spike_sorting.py index 32804645..f491b5b5 100644 --- 
a/element_array_ephys/spike_sorting/si_clustering.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -30,11 +30,7 @@ from element_array_ephys import get_logger, probe, readers -from .preprocessing import ( - mimic_catGT, - mimic_IBLdestriping, - mimic_IBLdestriping_modified, -) +from . import si_preprocessing log = get_logger(__name__) @@ -100,9 +96,13 @@ def make(self, key): execution_time = datetime.utcnow() # Set the output directory - acq_software, output_dir = ( - ephys.ClusteringTask * ephys.EphysRecording & key - ).fetch1("acq_software", "clustering_output_dir") + acq_software, clustering_method, params = ( + ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key + ).fetch1("acq_software", "clustering_method", "params") + + for req_key in ("SI_PREPROCESSING_METHOD", "SI_SORTING_PARAMS", "SI_WAVEFORM_EXTRACTION_PARAMS", "SI_QUALITY_METRICS_PARAMS"): + if req_key not in params: + raise ValueError(f"{req_key} must be defined in ClusteringParamSet for SpikeInterface execution") if not output_dir: output_dir = ephys.ClusteringTask.infer_output_dir( @@ -114,63 +114,68 @@ def make(self, key): ) output_dir = pathlib.Path(output_dir) output_full_dir = find_full_path( - ephys.get_ephys_root_data_dir(), output_dir.parent - ) # recording object will be stored in the parent dir since it can be re-used for multiple sorters + ephys.get_ephys_root_data_dir(), output_dir + ) recording_file = ( output_full_dir / "si_recording.pkl" ) # recording cache to be created for each key - if not recording_file.exists(): # skip if si_recording.pkl already exists - # Create SI recording extractor object - data_dir = ( - ephys.get_ephys_root_data_dir()[0] / output_dir.parent - ) # raw data directory - stream_names, stream_ids = si.extractors.get_neo_streams( - acq_software.strip().lower(), folder_path=data_dir - ) - si_recording: si.BaseRecording = SI_READERS[acq_software]( - folder_path=data_dir, stream_name=stream_names[0] - ) + # Create SI recording 
extractor object + if acq_software == "SpikeGLX": + spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) + spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) + spikeglx_recording.validate_file("ap") + data_dir = spikeglx_meta_filepath.parent + elif acq_software == "Open Ephys": + oe_probe = ephys.get_openephys_probe_data(key) + assert len(oe_probe.recording_info["recording_files"]) == 1 + data_dir = oe_probe.recording_info["recording_files"][0] + else: + raise NotImplementedError(f"Not implemented for {acq_software}") + + stream_names, stream_ids = si.extractors.get_neo_streams( + acq_software.strip().lower(), folder_path=data_dir + ) + si_recording: si.BaseRecording = SI_READERS[acq_software]( + folder_path=data_dir, stream_name=stream_names[0] + ) - # Add probe information to recording object - electrode_config_key = ( - probe.ElectrodeConfig * ephys.EphysRecording & key - ).fetch1("KEY") - electrodes_df = ( - ( - probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode - & electrode_config_key - ) - .fetch(format="frame") - .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] - ) - channels_details = ephys.get_recording_channels_details(key) - - # Create SI probe object - si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) - si_probe.set_device_channel_indices(channels_details["channel_ind"]) - si_recording.set_probe(probe=si_probe, in_place=True) - - # Run preprocessing and save results to output folder - preprocessing_method = "catGT" # where to load this info? 
- si_recording = { - "catGT": mimic_catGT, - "IBLdestriping": mimic_IBLdestriping, - "IBLdestriping_modified": mimic_IBLdestriping_modified, - }[preprocessing_method](si_recording) - si_recording.dump_to_pickle(file_path=recording_file) - - self.insert1( - { - **key, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } + # Add probe information to recording object + electrode_config_key = ( + probe.ElectrodeConfig * ephys.EphysRecording & key + ).fetch1("KEY") + electrodes_df = ( + ( + probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode + & electrode_config_key ) + .fetch(format="frame") + .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] + ) + channels_details = ephys.get_recording_channels_details(key) + + # Create SI probe object + si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) + si_probe.set_device_channel_indices(channels_details["channel_ind"]) + si_recording.set_probe(probe=si_probe, in_place=True) + + # Run preprocessing and save results to output folder + preprocessing_method = params["SI_PREPROCESSING_METHOD"] + si_preproc_func = si_preprocessing.preprocessing_function_mapping[preprocessing_method] + si_recording = si_preproc_func(si_recording) + si_recording.dump_to_pickle(file_path=recording_file) + + self.insert1( + { + **key, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) @schema @@ -203,6 +208,8 @@ def make(self, key): ) sorter_dir = output_full_dir / sorter_name + si_sorting_params = params["SI_SORTING_PARAMS"] + # Run sorting si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( sorter_name=sorter_name, @@ -210,7 +217,7 @@ def make(self, key): output_folder=sorter_dir, verbse=True, docker_image=True, - **params, + **si_sorting_params, ) # Run sorting @@ -255,14 +262,14 @@ def make(self, key): si_recording: si.BaseRecording = 
si.load_extractor(recording_file) si_sorting: si.sorters.BaseSorter = si.load_extractor(sorting_file) + si_waveform_extraction_params = params["SI_WAVEFORM_EXTRACTION_PARAMS"] + # Extract waveforms we: si.WaveformExtractor = si.extract_waveforms( si_recording, si_sorting, folder=sorter_dir / "waveform", # The folder where waveforms are cached - ms_before=3.0, - ms_after=4.0, - max_spikes_per_unit=500, + **si_waveform_extraction_params overwrite=True, **JOB_KWARGS, ) From fd5e9faf17ddac4c6dc76a904e0d27e2ced3f6ae Mon Sep 17 00:00:00 2001 From: kushalbakshi Date: Wed, 10 Jan 2024 16:20:18 -0600 Subject: [PATCH 055/204] Minor fixes --- element_array_ephys/ephys_acute.py | 13 +++++++++---- element_array_ephys/ephys_no_curation.py | 6 +++--- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/element_array_ephys/ephys_acute.py b/element_array_ephys/ephys_acute.py index 0a213db8..cbafb194 100644 --- a/element_array_ephys/ephys_acute.py +++ b/element_array_ephys/ephys_acute.py @@ -126,7 +126,7 @@ class AcquisitionSoftware(dj.Lookup): acq_software ( varchar(24) ): Acquisition software, e.g,. 
SpikeGLX, OpenEphys """ - definition = """ # Software used for recording of neuropixels probes + definition = """ # Name of software used for recording of neuropixels probes - SpikeGLX or Open Ephys acq_software: varchar(24) """ contents = zip(["SpikeGLX", "Open Ephys"]) @@ -180,7 +180,10 @@ def auto_generate_entries(cls, session_key): "probe_type": spikeglx_meta.probe_model, "probe": spikeglx_meta.probe_SN, } - if probe_key["probe"] not in [p["probe"] for p in probe_list]: + if ( + probe_key["probe"] not in [p["probe"] for p in probe_list] + and probe_key not in probe.Probe() + ): probe_list.append(probe_key) probe_dir = meta_filepath.parent @@ -204,7 +207,10 @@ def auto_generate_entries(cls, session_key): "probe_type": oe_probe.probe_model, "probe": oe_probe.probe_SN, } - if probe_key["probe"] not in [p["probe"] for p in probe_list]: + if ( + probe_key["probe"] not in [p["probe"] for p in probe_list] + and probe_key not in probe.Probe() + ): probe_list.append(probe_key) probe_insertion_list.append( { @@ -533,7 +539,6 @@ def make(self, key): - 1 : 0 : -self._skip_channel_counts ] - # (sample x channel) lfp = oe_probe.lfp_timeseries[:, lfp_channel_ind] lfp = ( lfp * np.array(oe_probe.lfp_meta["channels_gains"])[lfp_channel_ind] diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 4d052169..22b76c39 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -58,7 +58,6 @@ def activate( global _linking_module _linking_module = linking_module - # activate probe.activate( probe_schema_name, create_schema=create_schema, create_tables=create_tables ) @@ -315,8 +314,9 @@ def make(self, key): break else: raise FileNotFoundError( - "Ephys recording data not found!" - " Neither SpikeGLX nor Open Ephys recording files found" + f"Ephys recording data not found!" 
+ f" Neither SpikeGLX nor Open Ephys recording files found" + f" in {session_dir}" ) supported_probe_types = probe.ProbeType.fetch("probe_type") From 90d1ba4e4c87493c4e79ae93ed139ee0f5e3218f Mon Sep 17 00:00:00 2001 From: JaerongA Date: Thu, 1 Feb 2024 15:27:57 -0600 Subject: [PATCH 056/204] feat: :sparkles: modify QualityMetrics make function --- element_array_ephys/ephys_no_curation.py | 34 +++++++++++++++--------- 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 8ee7ee8b..ca293d95 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1358,24 +1358,34 @@ class Waveform(dj.Part): def make(self, key): """Populates tables with quality metrics data.""" + # Load metrics.csv output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - metric_fp = kilosort_dir / "metrics.csv" - rename_dict = { - "isi_viol": "isi_violation", - "num_viol": "number_violation", - "contam_rate": "contamination_rate", - } - + output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + metric_fp = output_dir / "metrics.csv" if not metric_fp.exists(): raise FileNotFoundError(f"QC metrics file not found: {metric_fp}") - metrics_df = pd.read_csv(metric_fp) - metrics_df.set_index("cluster_id", inplace=True) + + # Conform the dataframe to match the table definition + if "cluster_id" in metrics_df.columns: + metrics_df.set_index("cluster_id", inplace=True) + else: + metrics_df.rename( + columns={metrics_df.columns[0]: "cluster_id"}, inplace=True + ) + metrics_df.set_index("cluster_id", inplace=True) metrics_df.replace([np.inf, -np.inf], np.nan, inplace=True) metrics_df.columns = metrics_df.columns.str.lower() - metrics_df.rename(columns=rename_dict, inplace=True) + + metrics_df.rename( + columns={ + "isi_viol": "isi_violation", + "num_viol": 
"number_violation", + "contam_rate": "contamination_rate", + }, + inplace=True, + ) + metrics_list = [ dict(metrics_df.loc[unit_key["unit"]], **unit_key) for unit_key in (CuratedClustering.Unit & key).fetch("KEY") From cacefaceb0246bbc7b1a86b7e9afbaff335f5a86 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 6 Feb 2024 21:02:47 +0000 Subject: [PATCH 057/204] update si_spike_sorting.PreProcessing make function --- .../spike_sorting/si_spike_sorting.py | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index f491b5b5..82729fe7 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -96,13 +96,20 @@ def make(self, key): execution_time = datetime.utcnow() # Set the output directory - acq_software, clustering_method, params = ( + acq_software, output_dir, params = ( ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method", "params") - - for req_key in ("SI_PREPROCESSING_METHOD", "SI_SORTING_PARAMS", "SI_WAVEFORM_EXTRACTION_PARAMS", "SI_QUALITY_METRICS_PARAMS"): + ).fetch1("acq_software", "clustering_output_dir", "params") + + for req_key in ( + "SI_SORTING_PARAMS", + "SI_PREPROCESSING_METHOD", + "SI_WAVEFORM_EXTRACTION_PARAMS", + "SI_QUALITY_METRICS_PARAMS", + ): if req_key not in params: - raise ValueError(f"{req_key} must be defined in ClusteringParamSet for SpikeInterface execution") + raise ValueError( + f"{req_key} must be defined in ClusteringParamSet for SpikeInterface execution" + ) if not output_dir: output_dir = ephys.ClusteringTask.infer_output_dir( @@ -113,9 +120,7 @@ def make(self, key): {**key, "clustering_output_dir": output_dir.as_posix()} ) output_dir = pathlib.Path(output_dir) - output_full_dir = find_full_path( - ephys.get_ephys_root_data_dir(), output_dir - ) + 
output_full_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) recording_file = ( output_full_dir / "si_recording.pkl" @@ -124,7 +129,9 @@ def make(self, key): # Create SI recording extractor object if acq_software == "SpikeGLX": spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) + spikeglx_recording = readers.spikeglx.SpikeGLX( + spikeglx_meta_filepath.parent + ) spikeglx_recording.validate_file("ap") data_dir = spikeglx_meta_filepath.parent elif acq_software == "Open Ephys": @@ -161,8 +168,9 @@ def make(self, key): si_recording.set_probe(probe=si_probe, in_place=True) # Run preprocessing and save results to output folder - preprocessing_method = params["SI_PREPROCESSING_METHOD"] - si_preproc_func = si_preprocessing.preprocessing_function_mapping[preprocessing_method] + si_preproc_func = si_preprocessing.preprocessing_function_mapping[ + params["SI_PREPROCESSING_METHOD"] + ] si_recording = si_preproc_func(si_recording) si_recording.dump_to_pickle(file_path=recording_file) From f98e1ed332c7c5ca61b9321e461c0101b063a064 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 7 Feb 2024 17:20:15 +0000 Subject: [PATCH 058/204] update SIClustering make function --- .../spike_sorting/si_spike_sorting.py | 26 ++++++++----------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 82729fe7..a2f2db24 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -25,11 +25,10 @@ import pandas as pd import probeinterface as pi import spikeinterface as si +from element_array_ephys import get_logger, probe, readers from element_interface.utils import find_full_path from spikeinterface import exporters, postprocessing, qualitymetrics, sorters -from element_array_ephys import get_logger, probe, 
readers - from . import si_preprocessing log = get_logger(__name__) @@ -202,34 +201,31 @@ def make(self, key): execution_time = datetime.utcnow() # Load recording object. - output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - output_full_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - recording_file = output_full_dir.parent / "si_recording.pkl" + clustering_method, output_dir, params = ( + ephys.ClusteringTask * ephys.ClusteringParamSet & key + ).fetch1("clustering_method", "clustering_output_dir", "params") + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + recording_file = output_dir / "si_recording.pkl" si_recording: si.BaseRecording = si.load_extractor(recording_file) # Get sorter method and create output directory. - clustering_method, params = ( - ephys.ClusteringTask * ephys.ClusteringParamSet & key - ).fetch1("clustering_method", "params") sorter_name = ( "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method ) - sorter_dir = output_full_dir / sorter_name - - si_sorting_params = params["SI_SORTING_PARAMS"] # Run sorting si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( sorter_name=sorter_name, recording=si_recording, - output_folder=sorter_dir, - verbse=True, + output_folder=output_dir / sorter_name, + remove_existing_folder=True, + verbose=True, docker_image=True, - **si_sorting_params, + **params.get("SI_SORTING_PARAMS", {}), ) # Run sorting - sorting_save_path = sorter_dir / "si_sorting.pkl" + sorting_save_path = output_dir / "si_sorting.pkl" si_sorting.dump_to_pickle(sorting_save_path) self.insert1( From 7a060ef5b875dd74189841aa5a72d3eea55d7dda Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 7 Feb 2024 17:21:16 +0000 Subject: [PATCH 059/204] update PostProcessing make function --- .../spike_sorting/si_spike_sorting.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git 
a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index a2f2db24..5eb2b822 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -254,28 +254,26 @@ class PostProcessing(dj.Imported): def make(self, key): execution_time = datetime.utcnow() - JOB_KWARGS = dict(n_jobs=-1, chunk_size=30000) # Load sorting & recording object. - output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") + output_dir, params = (ephys.ClusteringTask & key).fetch1( + "clustering_output_dir", "params" + ) output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) recording_file = output_dir / "si_recording.pkl" - sorter_dir = output_dir / key["sorter_name"] - sorting_file = sorter_dir / "si_sorting.pkl" + sorting_file = output_dir / "si_sorting.pkl" si_recording: si.BaseRecording = si.load_extractor(recording_file) si_sorting: si.sorters.BaseSorter = si.load_extractor(sorting_file) - si_waveform_extraction_params = params["SI_WAVEFORM_EXTRACTION_PARAMS"] - # Extract waveforms we: si.WaveformExtractor = si.extract_waveforms( si_recording, si_sorting, - folder=sorter_dir / "waveform", # The folder where waveforms are cached - **si_waveform_extraction_params + folder=output_dir / "waveform", # The folder where waveforms are cached overwrite=True, - **JOB_KWARGS, + **params.get("SI_WAVEFORM_EXTRACTION_PARAMS", {}), + **params.get("SI_JOB_KWARGS", {"n_jobs": -1, "chunk_size": 30000}), ) # Calculate QC Metrics @@ -296,9 +294,11 @@ def make(self, key): ) # Add PCA based metrics. These will be added to the metrics dataframe above. 
_ = si.postprocessing.compute_principal_components( - waveform_extractor=we, n_components=5, mode="by_channel_local" - ) # TODO: the parameters need to be checked + waveform_extractor=we, **params.get("SI_QUALITY_METRICS_PARAMS", None) + ) + # Save the output (metrics.csv to the output dir) metrics = si.qualitymetrics.compute_quality_metrics(waveform_extractor=we) + metrics.to_csv(output_dir / "metrics.csv") # Save results self.insert1( @@ -312,7 +312,7 @@ def make(self, key): } ) - # all finished, insert this `key` into ephys.Clustering + # Once finished, insert this `key` into ephys.Clustering ephys.Clustering.insert1( {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True ) From 6daf0c5a1f3f40d3700ce09bfa047326b4477cab Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 7 Feb 2024 17:01:52 -0600 Subject: [PATCH 060/204] feat: :sparkles: add n.a. to ClusterQualityLabel --- element_array_ephys/ephys_no_curation.py | 1 + 1 file changed, 1 insertion(+) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index ca293d95..aa743598 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -703,6 +703,7 @@ class ClusterQualityLabel(dj.Lookup): ("ok", "probably a single unit, but could be contaminated"), ("mua", "multi-unit activity"), ("noise", "bad unit"), + ("n.a.", "not available"), ] From e41ff1daeaddeb2847b426893f430906a86e91d6 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 7 Feb 2024 17:15:05 -0600 Subject: [PATCH 061/204] extract all waveforms --- element_array_ephys/spike_sorting/si_spike_sorting.py | 1 + 1 file changed, 1 insertion(+) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 5eb2b822..432b6c10 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -271,6 +271,7 @@ def make(self, key): 
si_recording, si_sorting, folder=output_dir / "waveform", # The folder where waveforms are cached + max_spikes_per_unit=None, overwrite=True, **params.get("SI_WAVEFORM_EXTRACTION_PARAMS", {}), **params.get("SI_JOB_KWARGS", {"n_jobs": -1, "chunk_size": 30000}), From e8d9854f4014302248d483604faaa2b4f2858fc2 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 7 Feb 2024 19:15:34 -0600 Subject: [PATCH 062/204] feat: :sparkles: modify CuratedClustering make function for spike interface --- element_array_ephys/ephys_no_curation.py | 195 ++++++++++++++++------- 1 file changed, 138 insertions(+), 57 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index aa743598..70ce87cf 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -959,75 +959,156 @@ class Unit(dj.Part): def make(self, key): """Automated population of Unit information.""" output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - kilosort_dataset = kilosort.Kilosort(kilosort_dir) - acq_software, sample_rate = (EphysRecording & key).fetch1( - "acq_software", "sampling_rate" - ) + if (output_dir / "waveform").exists(): # read from spikeinterface outputs + we: si.WaveformExtractor = si.load_waveforms( + output_dir / "waveform", with_recording=False + ) + si_sorting: si.sorters.BaseSorter = si.load_extractor( + output_dir / "sorting.pkl" + ) - sample_rate = kilosort_dataset.data["params"].get("sample_rate", sample_rate) + unit_peak_channel_map: dict[int, int] = si.get_template_extremum_channel( + we, outputs="index" + ) # {unit: peak_channel_index} - # ---------- Unit ---------- - # -- Remove 0-spike units - withspike_idx = [ - i - for i, u in enumerate(kilosort_dataset.data["cluster_ids"]) - if (kilosort_dataset.data["spike_clusters"] == u).any() - ] - 
valid_units = kilosort_dataset.data["cluster_ids"][withspike_idx] - valid_unit_labels = kilosort_dataset.data["cluster_groups"][withspike_idx] - # -- Get channel and electrode-site mapping - channel2electrodes = get_neuropixels_channel2electrode_map(key, acq_software) - - # -- Spike-times -- - # spike_times_sec_adj > spike_times_sec > spike_times - spike_time_key = ( - "spike_times_sec_adj" - if "spike_times_sec_adj" in kilosort_dataset.data - else "spike_times_sec" - if "spike_times_sec" in kilosort_dataset.data - else "spike_times" - ) - spike_times = kilosort_dataset.data[spike_time_key] - kilosort_dataset.extract_spike_depths() + spike_count_dict = dict[int, int] = si_sorting.count_num_spikes_per_unit() + # {unit: spike_count} - # -- Spike-sites and Spike-depths -- - spike_sites = np.array( - [ - channel2electrodes[s]["electrode"] - for s in kilosort_dataset.data["spike_sites"] - ] - ) - spike_depths = kilosort_dataset.data["spike_depths"] - - # -- Insert unit, label, peak-chn - units = [] - for unit, unit_lbl in zip(valid_units, valid_unit_labels): - if (kilosort_dataset.data["spike_clusters"] == unit).any(): - unit_channel, _ = kilosort_dataset.get_best_channel(unit) - unit_spike_times = ( - spike_times[kilosort_dataset.data["spike_clusters"] == unit] - / sample_rate - ) - spike_count = len(unit_spike_times) + spikes = si_sorting.to_spike_vector( + extremum_channel_inds=unit_peak_channel_map + ) + + # Get electrode info + electrode_config_key = ( + EphysRecording * probe.ElectrodeConfig & key + ).fetch1("KEY") + + electrode_query = ( + probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode + & electrode_config_key + ) + channel2electrode_map = dict( + zip(*electrode_query.fetch("channel", "electrode")) + ) + + # Get channel to electrode mapping + channel2depth_map = dict(zip(*electrode_query.fetch("channel", "y_coord"))) + + peak_electrode_ind = np.array( + [ + channel2electrode_map[unit_peak_channel_map[unit_id]] + for unit_id in si_sorting.unit_ids + ] 
+ ) + + # Get channel to depth mapping + electrode_depth_ind = np.array( + [ + channel2depth_map[unit_peak_channel_map[unit_id]] + for unit_id in si_sorting.unit_ids + ] + ) + spikes["electrode"] = peak_electrode_ind[spikes["unit_index"]] + spikes["depth"] = electrode_depth_ind[spikes["unit_index"]] + + units = [] + for unit_id in si_sorting.unit_ids: + unit_id = int(unit_id) units.append( { - "unit": unit, - "cluster_quality_label": unit_lbl, - **channel2electrodes[unit_channel], - "spike_times": unit_spike_times, - "spike_count": spike_count, - "spike_sites": spike_sites[ - kilosort_dataset.data["spike_clusters"] == unit + "unit": unit_id, + "cluster_quality_label": "n.a.", + "spike_times": si_sorting.get_unit_spike_train( + unit_id, return_times=True + ), + "spike_count": spike_count_dict[unit_id], + "spike_sites": spikes["electrode"][ + spikes["unit_index"] == unit_id ], - "spike_depths": spike_depths[ - kilosort_dataset.data["spike_clusters"] == unit + "spike_depths": spikes["depth"][ + spikes["unit_index"] == unit_id ], } ) + else: + kilosort_dataset = kilosort.Kilosort(output_dir) + acq_software, sample_rate = (EphysRecording & key).fetch1( + "acq_software", "sampling_rate" + ) + + sample_rate = kilosort_dataset.data["params"].get( + "sample_rate", sample_rate + ) + + # ---------- Unit ---------- + # -- Remove 0-spike units + withspike_idx = [ + i + for i, u in enumerate(kilosort_dataset.data["cluster_ids"]) + if (kilosort_dataset.data["spike_clusters"] == u).any() + ] + valid_units = kilosort_dataset.data["cluster_ids"][withspike_idx] + valid_unit_labels = kilosort_dataset.data["cluster_groups"][withspike_idx] + + # -- Spike-times -- + # spike_times_sec_adj > spike_times_sec > spike_times + spike_time_key = ( + "spike_times_sec_adj" + if "spike_times_sec_adj" in kilosort_dataset.data + else ( + "spike_times_sec" + if "spike_times_sec" in kilosort_dataset.data + else "spike_times" + ) + ) + spike_times = kilosort_dataset.data[spike_time_key] + 
kilosort_dataset.extract_spike_depths() + + # Get channel and electrode-site mapping + channel2electrodes = get_neuropixels_channel2electrode_map( + key, acq_software + ) + + # -- Spike-sites and Spike-depths -- + spike_sites = np.array( + [ + channel2electrodes[s]["electrode"] + for s in kilosort_dataset.data["spike_sites"] + ] + ) + spike_depths = kilosort_dataset.data["spike_depths"] + + # -- Insert unit, label, peak-chn + units = [] + for unit, unit_lbl in zip(valid_units, valid_unit_labels): + if (kilosort_dataset.data["spike_clusters"] == unit).any(): + unit_channel, _ = kilosort_dataset.get_best_channel(unit) + unit_spike_times = ( + spike_times[kilosort_dataset.data["spike_clusters"] == unit] + / sample_rate + ) + spike_count = len(unit_spike_times) + + units.append( + { + "unit": unit, + "cluster_quality_label": unit_lbl, + **channel2electrodes[unit_channel], + "spike_times": unit_spike_times, + "spike_count": spike_count, + "spike_sites": spike_sites[ + kilosort_dataset.data["spike_clusters"] == unit + ], + "spike_depths": spike_depths[ + kilosort_dataset.data["spike_clusters"] == unit + ], + } + ) + self.insert1(key) self.Unit.insert([{**key, **u} for u in units]) From 00b82f81017fdb92459cd334cda8bb3dc49b0fde Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 7 Feb 2024 19:17:15 -0600 Subject: [PATCH 063/204] refactor: :recycle: import si module & re-organize imports --- element_array_ephys/ephys_no_curation.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 70ce87cf..63e72951 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1,17 +1,18 @@ -import datajoint as dj +import gc +import importlib +import inspect import pathlib import re -import numpy as np -import inspect -import importlib -import gc from decimal import Decimal -import pandas as pd -from 
element_interface.utils import find_root_directory, find_full_path, dict_to_uuid -from .readers import spikeglx, kilosort, openephys -from element_array_ephys import probe, get_logger, ephys_report +import datajoint as dj +import numpy as np +import pandas as pd +from element_array_ephys import ephys_report, get_logger, probe +from element_interface.utils import (dict_to_uuid, find_full_path, + find_root_directory) +from .readers import kilosort, openephys, spikeglx log = get_logger(__name__) @@ -19,8 +20,8 @@ _linking_module = None -import spikeinterface -import spikeinterface.full as si +import spikeinterface as si +from spikeinterface import exporters, postprocessing, qualitymetrics, sorters def activate( From b01c36c81595e8e4cb38e22f2dee146986508b79 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Mon, 12 Feb 2024 09:39:04 -0600 Subject: [PATCH 064/204] update WaveformSet ingestion --- element_array_ephys/ephys_no_curation.py | 368 ++++++++++++++--------- 1 file changed, 218 insertions(+), 150 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 63e72951..4887096c 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1168,177 +1168,245 @@ class Waveform(dj.Part): def make(self, key): """Populates waveform tables.""" output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - kilosort_dataset = kilosort.Kilosort(kilosort_dir) + if (output_dir / "waveform").exists(): # read from spikeinterface outputs - acq_software, probe_serial_number = ( - EphysRecording * ProbeInsertion & key - ).fetch1("acq_software", "probe") + we: si.WaveformExtractor = si.load_waveforms( + output_dir / "waveform", with_recording=False + ) + unit_id_to_peak_channel_indices: dict[int, np.ndarray] = ( + 
si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices + ) # {unit: peak_channel_index} - # -- Get channel and electrode-site mapping - recording_key = (EphysRecording & key).fetch1("KEY") - channel2electrodes = get_neuropixels_channel2electrode_map( - recording_key, acq_software - ) + units = (CuratedClustering.Unit & key).fetch("KEY", order_by="unit") - # Get all units - units = { - u["unit"]: u - for u in (CuratedClustering.Unit & key).fetch(as_dict=True, order_by="unit") - } + # Get electrode info + electrode_config_key = ( + EphysRecording * probe.ElectrodeConfig & key + ).fetch1("KEY") - waveforms_folder = [ - f for f in kilosort_dir.parent.rglob(r"*/waveforms*") if f.is_dir() - ] + electrode_query = ( + probe.ProbeType.Electrode.proj() * probe.ElectrodeConfig.Electrode + & electrode_config_key + ) + electrode_info = electrode_query.fetch( + "KEY", order_by="electrode", as_dict=True + ) - if (kilosort_dir / "mean_waveforms.npy").exists(): - unit_waveforms = np.load( - kilosort_dir / "mean_waveforms.npy" - ) # unit x channel x sample + # Get mean waveform for each unit from all channels + mean_waveforms = we.get_all_templates( + mode="average" + ) # (unit x sample x channel) - def yield_unit_waveforms(): - for unit_no, unit_waveform in zip( - kilosort_dataset.data["cluster_ids"], unit_waveforms - ): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - if unit_no in units: - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], unit_waveform - ): - unit_electrode_waveforms.append( - { - **units[unit_no], - **channel2electrodes[channel], - "waveform_mean": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == units[unit_no]["electrode"] - ): - unit_peak_waveform = { - **units[unit_no], - "peak_electrode_waveform": channel_waveform, - } - yield unit_peak_waveform, unit_electrode_waveforms - - # Spike interface mean and peak waveform extraction from we object - - elif 
len(waveforms_folder) > 0 & (waveforms_folder[0]).exists(): - we_kilosort = si.load_waveforms(waveforms_folder[0].parent) - unit_templates = we_kilosort.get_all_templates() - unit_waveforms = np.reshape( - unit_templates, - ( - unit_templates.shape[1], - unit_templates.shape[3], - unit_templates.shape[2], - ), + unit_peak_waveform = [] + unit_electrode_waveforms = [] + + for unit in units: + unit_peak_waveform.append( + { + **unit, + "peak_electrode_waveform": we.get_template( + unit_id=unit["unit"], mode="average", force_dense=True + )[:, unit_id_to_peak_channel_indices[unit["unit"]][0]], + } + ) + + unit_electrode_waveforms.extend( + [ + { + **unit, + **e, + "waveform_mean": mean_waveforms[ + unit["unit"], :, e["electrode"] + ], + } + for e in electrode_info + ] + ) + + self.insert1(key) + self.PeakWaveform.insert(unit_peak_waveform) + self.Waveform.insert(unit_electrode_waveforms) + + else: + kilosort_dataset = kilosort.Kilosort(output_dir) + + acq_software, probe_serial_number = ( + EphysRecording * ProbeInsertion & key + ).fetch1("acq_software", "probe") + + # -- Get channel and electrode-site mapping + recording_key = (EphysRecording & key).fetch1("KEY") + channel2electrodes = get_neuropixels_channel2electrode_map( + recording_key, acq_software ) - # Approach assumes unit_waveforms was generated correctly (templates are actually the same as mean_waveforms) - def yield_unit_waveforms(): - for unit_no, unit_waveform in zip( - kilosort_dataset.data["cluster_ids"], unit_waveforms - ): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - if unit_no in units: + # Get all units + units = { + u["unit"]: u + for u in (CuratedClustering.Unit & key).fetch( + as_dict=True, order_by="unit" + ) + } + + waveforms_folder = [ + f for f in output_dir.parent.rglob(r"*/waveforms*") if f.is_dir() + ] + + if (output_dir / "mean_waveforms.npy").exists(): + unit_waveforms = np.load( + output_dir / "mean_waveforms.npy" + ) # unit x channel x sample + + def 
yield_unit_waveforms(): + for unit_no, unit_waveform in zip( + kilosort_dataset.data["cluster_ids"], unit_waveforms + ): + unit_peak_waveform = {} + unit_electrode_waveforms = [] + if unit_no in units: + for channel, channel_waveform in zip( + kilosort_dataset.data["channel_map"], unit_waveform + ): + unit_electrode_waveforms.append( + { + **units[unit_no], + **channel2electrodes[channel], + "waveform_mean": channel_waveform, + } + ) + if ( + channel2electrodes[channel]["electrode"] + == units[unit_no]["electrode"] + ): + unit_peak_waveform = { + **units[unit_no], + "peak_electrode_waveform": channel_waveform, + } + yield unit_peak_waveform, unit_electrode_waveforms + + # Spike interface mean and peak waveform extraction from we object + + elif len(waveforms_folder) > 0 & (waveforms_folder[0]).exists(): + we_kilosort = si.load_waveforms(waveforms_folder[0].parent) + unit_templates = we_kilosort.get_all_templates() + unit_waveforms = np.reshape( + unit_templates, + ( + unit_templates.shape[1], + unit_templates.shape[3], + unit_templates.shape[2], + ), + ) + + # Approach assumes unit_waveforms was generated correctly (templates are actually the same as mean_waveforms) + def yield_unit_waveforms(): + for unit_no, unit_waveform in zip( + kilosort_dataset.data["cluster_ids"], unit_waveforms + ): + unit_peak_waveform = {} + unit_electrode_waveforms = [] + if unit_no in units: + for channel, channel_waveform in zip( + kilosort_dataset.data["channel_map"], unit_waveform + ): + unit_electrode_waveforms.append( + { + **units[unit_no], + **channel2electrodes[channel], + "waveform_mean": channel_waveform, + } + ) + if ( + channel2electrodes[channel]["electrode"] + == units[unit_no]["electrode"] + ): + unit_peak_waveform = { + **units[unit_no], + "peak_electrode_waveform": channel_waveform, + } + yield unit_peak_waveform, unit_electrode_waveforms + + # Approach not using spike interface templates (ie. 
taking mean of each unit waveform) + # def yield_unit_waveforms(): + # for unit_id in we_kilosort.unit_ids: + # unit_waveform = np.mean(we_kilosort.get_waveforms(unit_id), 0) + # unit_peak_waveform = {} + # unit_electrode_waveforms = [] + # if unit_id in units: + # for channel, channel_waveform in zip( + # kilosort_dataset.data["channel_map"], unit_waveform + # ): + # unit_electrode_waveforms.append( + # { + # **units[unit_id], + # **channel2electrodes[channel], + # "waveform_mean": channel_waveform, + # } + # ) + # if ( + # channel2electrodes[channel]["electrode"] + # == units[unit_id]["electrode"] + # ): + # unit_peak_waveform = { + # **units[unit_id], + # "peak_electrode_waveform": channel_waveform, + # } + # yield unit_peak_waveform, unit_electrode_waveforms + + else: + if acq_software == "SpikeGLX": + spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) + neuropixels_recording = spikeglx.SpikeGLX( + spikeglx_meta_filepath.parent + ) + elif acq_software == "Open Ephys": + session_dir = find_full_path( + get_ephys_root_data_dir(), get_session_directory(key) + ) + openephys_dataset = openephys.OpenEphys(session_dir) + neuropixels_recording = openephys_dataset.probes[ + probe_serial_number + ] + + def yield_unit_waveforms(): + for unit_dict in units.values(): + unit_peak_waveform = {} + unit_electrode_waveforms = [] + + spikes = unit_dict["spike_times"] + waveforms = neuropixels_recording.extract_spike_waveforms( + spikes, kilosort_dataset.data["channel_map"] + ) # (sample x channel x spike) + waveforms = waveforms.transpose( + (1, 2, 0) + ) # (channel x spike x sample) for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], unit_waveform + kilosort_dataset.data["channel_map"], waveforms ): unit_electrode_waveforms.append( { - **units[unit_no], + **unit_dict, **channel2electrodes[channel], - "waveform_mean": channel_waveform, + "waveform_mean": channel_waveform.mean(axis=0), + "waveforms": channel_waveform, } ) if ( 
channel2electrodes[channel]["electrode"] - == units[unit_no]["electrode"] + == unit_dict["electrode"] ): unit_peak_waveform = { - **units[unit_no], - "peak_electrode_waveform": channel_waveform, + **unit_dict, + "peak_electrode_waveform": channel_waveform.mean( + axis=0 + ), } - yield unit_peak_waveform, unit_electrode_waveforms - - # Approach not using spike interface templates (ie. taking mean of each unit waveform) - # def yield_unit_waveforms(): - # for unit_id in we_kilosort.unit_ids: - # unit_waveform = np.mean(we_kilosort.get_waveforms(unit_id), 0) - # unit_peak_waveform = {} - # unit_electrode_waveforms = [] - # if unit_id in units: - # for channel, channel_waveform in zip( - # kilosort_dataset.data["channel_map"], unit_waveform - # ): - # unit_electrode_waveforms.append( - # { - # **units[unit_id], - # **channel2electrodes[channel], - # "waveform_mean": channel_waveform, - # } - # ) - # if ( - # channel2electrodes[channel]["electrode"] - # == units[unit_id]["electrode"] - # ): - # unit_peak_waveform = { - # **units[unit_id], - # "peak_electrode_waveform": channel_waveform, - # } - # yield unit_peak_waveform, unit_electrode_waveforms - else: - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - elif acq_software == "Open Ephys": - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - openephys_dataset = openephys.OpenEphys(session_dir) - neuropixels_recording = openephys_dataset.probes[probe_serial_number] - - def yield_unit_waveforms(): - for unit_dict in units.values(): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - - spikes = unit_dict["spike_times"] - waveforms = neuropixels_recording.extract_spike_waveforms( - spikes, kilosort_dataset.data["channel_map"] - ) # (sample x channel x spike) - waveforms = waveforms.transpose( - (1, 2, 0) - ) # (channel x spike x sample) - for channel, 
channel_waveform in zip( - kilosort_dataset.data["channel_map"], waveforms - ): - unit_electrode_waveforms.append( - { - **unit_dict, - **channel2electrodes[channel], - "waveform_mean": channel_waveform.mean(axis=0), - "waveforms": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == unit_dict["electrode"] - ): - unit_peak_waveform = { - **unit_dict, - "peak_electrode_waveform": channel_waveform.mean( - axis=0 - ), - } - - yield unit_peak_waveform, unit_electrode_waveforms + yield unit_peak_waveform, unit_electrode_waveforms # insert waveform on a per-unit basis to mitigate potential memory issue self.insert1(key) @@ -1448,7 +1516,7 @@ def make(self, key): if not metric_fp.exists(): raise FileNotFoundError(f"QC metrics file not found: {metric_fp}") metrics_df = pd.read_csv(metric_fp) - + # Conform the dataframe to match the table definition if "cluster_id" in metrics_df.columns: metrics_df.set_index("cluster_id", inplace=True) From 853b66f5bd39edd82ed14de635064f24c52855e4 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 13 Feb 2024 11:38:39 -0600 Subject: [PATCH 065/204] Update element_array_ephys/ephys_no_curation.py Co-authored-by: Kushal Bakshi <52367253+kushalbakshi@users.noreply.github.com> --- element_array_ephys/ephys_no_curation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 4887096c..c82f986f 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -319,8 +319,8 @@ def make(self, key): break else: raise FileNotFoundError( - f"Ephys recording data not found!" - f" Neither SpikeGLX nor Open Ephys recording files found" + "Ephys recording data not found!" 
+ "Neither SpikeGLX nor Open Ephys recording files found" ) supported_probe_types = probe.ProbeType.fetch("probe_type") From 67ebf4e994411617826263782142fcfc270b98f0 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 13 Feb 2024 11:38:47 -0600 Subject: [PATCH 066/204] Update element_array_ephys/ephys_no_curation.py Co-authored-by: Kushal Bakshi <52367253+kushalbakshi@users.noreply.github.com> --- element_array_ephys/ephys_no_curation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index c82f986f..0efddf9a 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -618,7 +618,7 @@ class ClusteringParamSet(dj.Lookup): ClusteringMethod (dict): ClusteringMethod primary key. paramset_desc (varchar(128) ): Description of the clustering parameter set. param_set_hash (uuid): UUID hash for the parameter set. - params (longblob) + params (longblob): Set of clustering parameters. """ definition = """ From 5fe60434655f8f0c9f8cb2b2ecaa63e4a3a28e2a Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 13 Feb 2024 11:38:51 -0600 Subject: [PATCH 067/204] Update element_array_ephys/ephys_no_curation.py Co-authored-by: Kushal Bakshi <52367253+kushalbakshi@users.noreply.github.com> --- element_array_ephys/ephys_no_curation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 0efddf9a..92224409 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1488,7 +1488,7 @@ class Waveform(dj.Part): recovery_slope (float): Slope of the regression line fit to first 30 microseconds from peak to tail. spread (float): The range with amplitude over 12-percent of maximum amplitude along the probe. velocity_above (float): inverse velocity of waveform propagation from soma to the top of the probe. 
- velocity_below (float) inverse velocity of waveform propagation from soma toward the bottom of the probe. + velocity_below (float): inverse velocity of waveform propagation from soma toward the bottom of the probe. """ definition = """ From ac08163cb55ac46d97a1b5995d965be6e8140ed8 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 13 Feb 2024 11:48:41 -0600 Subject: [PATCH 068/204] ci: run test only on the main branch --- .github/workflows/test.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index acaddca0..fec7ce0c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -1,11 +1,13 @@ name: Test on: push: + branches: + - main pull_request: + branches: + - main workflow_dispatch: jobs: - # devcontainer-build: - # uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main tests: runs-on: ubuntu-latest strategy: @@ -31,4 +33,3 @@ jobs: run: | python_version=${{matrix.py_ver}} black element_array_ephys --check --verbose --target-version py${python_version//.} - From e95331c54babe966ccbb6ce902463eb48c869c6d Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 13 Feb 2024 14:33:26 -0600 Subject: [PATCH 069/204] build: :heavy_plus_sign: add spikingcircus dependencies --- setup.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index f93247b6..ebf5d114 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from os import path -from setuptools import find_packages, setup +from setuptools import find_packages, setup pkg_name = "element_array_ephys" here = path.abspath(path.dirname(__file__)) @@ -16,7 +16,7 @@ setup( name=pkg_name.replace("_", "-"), - python_requires='>=3.7, <3.11', + python_requires=">=3.7, <3.11", version=__version__, # noqa F821 description="Extracellular Array Electrophysiology DataJoint Element", long_description=long_description, @@ -50,5 +50,6 @@ ], "nwb": ["dandi", "neuroconv[ecephys]", 
"pynwb"], "tests": ["pre-commit", "pytest", "pytest-cov"], + "spikingcircus": ["hdbscan", "numba"], }, ) From be5135e05ba40bb2054a18e9b00486ea1ba411d0 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 13 Feb 2024 16:00:47 -0600 Subject: [PATCH 070/204] refactor: fix typo & black formatting --- element_array_ephys/ephys_no_curation.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 76fdeefc..b105f8f8 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -35,7 +35,7 @@ def activate( Args: ephys_schema_name (str): A string containing the name of the ephys schema. - probe_schema_name (str): A string containing the name of the probe scehma. + probe_schema_name (str): A string containing the name of the probe schema. create_schema (bool): If True, schema will be created in the database. create_tables (bool): If True, tables related to the schema will be created in the database. linking_module (str): A string containing the module name or module containing the required dependencies to activate the schema. 
@@ -1174,11 +1174,11 @@ def make(self, key): we: si.WaveformExtractor = si.load_waveforms( output_dir / "waveform", with_recording=False ) - unit_id_to_peak_channel_indices: dict[int, np.ndarray] = ( - si.ChannelSparsity.from_best_channels( - we, 1, peak_sign="neg" - ).unit_id_to_channel_indices - ) # {unit: peak_channel_index} + unit_id_to_peak_channel_indices: dict[ + int, np.ndarray + ] = si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices # {unit: peak_channel_index} units = (CuratedClustering.Unit & key).fetch("KEY", order_by="unit") From 4b6fc0e9fe45f2a6b44be466f0731faad301734c Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 14 Feb 2024 20:10:56 -0600 Subject: [PATCH 071/204] feat: :sparkles: add EphysRecording.Channel part table --- element_array_ephys/ephys_no_curation.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index b105f8f8..25b2c147 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -284,6 +284,15 @@ class EphysRecording(dj.Imported): recording_duration: float # (seconds) duration of the recording from this probe """ + class Channel(dj.Part): + definitoin = """ + -> master + channel_idx: int # channel index + --- + -> probe.ElectrodeConfig.Electrode + channel_name="": varchar(64) + """ + class EphysFile(dj.Part): """Paths of electrophysiology recording files for each insertion. 
From 48025112da9acaab8b6e043b8da56e54a9e9725d Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 14:25:45 -0600 Subject: [PATCH 072/204] fix: :bug: fix get_logger missing error --- element_array_ephys/__init__.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/element_array_ephys/__init__.py b/element_array_ephys/__init__.py index 1c0c7285..3a0e5af6 100644 --- a/element_array_ephys/__init__.py +++ b/element_array_ephys/__init__.py @@ -1 +1,22 @@ +""" +isort:skip_file +""" + +import logging +import os + +import datajoint as dj + + +__all__ = ["ephys", "get_logger"] + +dj.config["enable_python_native_blobs"] = True + + +def get_logger(name): + log = logging.getLogger(name) + log.setLevel(os.getenv("LOGLEVEL", "INFO")) + return log + + from . import ephys_acute as ephys From bca67b3e47138cd2d7eaaf6c0b892557ff576786 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 21:03:28 +0000 Subject: [PATCH 073/204] fix typo & remove sorter_name --- element_array_ephys/ephys_no_curation.py | 2 +- element_array_ephys/spike_sorting/si_spike_sorting.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 25b2c147..5894fe16 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -285,7 +285,7 @@ class EphysRecording(dj.Imported): """ class Channel(dj.Part): - definitoin = """ + definition = """ -> master channel_idx: int # channel index --- diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 432b6c10..461987e6 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -191,7 +191,6 @@ class SIClustering(dj.Imported): definition = """ -> PreProcessing - sorter_name: varchar(30) # name of the sorter used --- execution_time: datetime # 
datetime of the start of this step execution_duration: float # execution duration in hours @@ -231,7 +230,6 @@ def make(self, key): self.insert1( { **key, - "sorter_name": sorter_name, "execution_time": execution_time, "execution_duration": ( datetime.utcnow() - execution_time From 1faa8f2e0e7f78f1097a220e4282ea4f8d6e7d1b Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 21:08:15 +0000 Subject: [PATCH 074/204] feat: :sparkles: add memoized_result implementation in SIClustering --- .../spike_sorting/si_spike_sorting.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 461987e6..6acd5a2b 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -26,7 +26,7 @@ import probeinterface as pi import spikeinterface as si from element_array_ephys import get_logger, probe, readers -from element_interface.utils import find_full_path +from element_interface.utils import find_full_path, memoized_result from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from . 
import si_preprocessing @@ -213,7 +213,17 @@ def make(self, key): ) # Run sorting - si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( + @memoized_result( + parameters={**key, **params}, + output_directory=output_dir / sorter_name, + ) + def _run_sorter(*args, **kwargs): + si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter(*args, **kwargs) + sorting_save_path = output_dir / sorter_name / "si_sorting.pkl" + si_sorting.dump_to_pickle(sorting_save_path) + return sorting_save_path + + sorting_save_path = _run_sorter( sorter_name=sorter_name, recording=si_recording, output_folder=output_dir / sorter_name, From 58f3a4453a821e67bef49971d74f4e97d9f97ef2 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 22:40:01 +0000 Subject: [PATCH 075/204] create a folder for storing recording pickle object --- .../spike_sorting/si_spike_sorting.py | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 6acd5a2b..3205d056 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -95,11 +95,16 @@ def make(self, key): execution_time = datetime.utcnow() # Set the output directory - acq_software, output_dir, params = ( + clustering_method, acq_software, output_dir, params = ( ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_output_dir", "params") - - for req_key in ( + ).fetch1("clustering_method", "acq_software", "clustering_output_dir", "params") + + # Get sorter method and create output directory. 
+ sorter_name = ( + "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method + ) + + for required_key in ( "SI_SORTING_PARAMS", "SI_PREPROCESSING_METHOD", "SI_WAVEFORM_EXTRACTION_PARAMS", @@ -110,6 +115,7 @@ def make(self, key): f"{req_key} must be defined in ClusteringParamSet for SpikeInterface execution" ) + # Set directory to store recording file. if not output_dir: output_dir = ephys.ClusteringTask.infer_output_dir( key, relative=True, mkdir=True @@ -118,11 +124,11 @@ def make(self, key): ephys.ClusteringTask.update1( {**key, "clustering_output_dir": output_dir.as_posix()} ) - output_dir = pathlib.Path(output_dir) - output_full_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + recording_dir = output_dir / sorter_name / "recording" + recording_dir.mkdir(parents=True, exist_ok=True) recording_file = ( - output_full_dir / "si_recording.pkl" + recording_dir / "si_recording.pkl" ) # recording cache to be created for each key # Create SI recording extractor object From 4fcea517577095232ef317e4a77421e3181b3632 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 22:41:31 +0000 Subject: [PATCH 076/204] install element_interface from datajoint upstream --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ebf5d114..532c72f6 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ "elements": [ "element-animal @ git+https://github.com/datajoint/element-animal.git", "element-event @ git+https://github.com/datajoint/element-event.git", - "element-interface @ git+https://github.com/datajoint/element-interface.git", + "element-interface @ git+https://github.com/datajoint/element-interface.git@dev_memoized_results", "element-lab @ git+https://github.com/datajoint/element-lab.git", "element-session @ git+https://github.com/datajoint/element-session.git", ], From 4f0e0204cd5b6c31d78576dd3c3ff9b88c5598c3 Mon 
Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 22:52:49 +0000 Subject: [PATCH 077/204] add required_key for parameters --- element_array_ephys/spike_sorting/si_spike_sorting.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 3205d056..d09a2c6b 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -110,9 +110,9 @@ def make(self, key): "SI_WAVEFORM_EXTRACTION_PARAMS", "SI_QUALITY_METRICS_PARAMS", ): - if req_key not in params: + if required_key not in params: raise ValueError( - f"{req_key} must be defined in ClusteringParamSet for SpikeInterface execution" + f"{required_key} must be defined in ClusteringParamSet for SpikeInterface execution" ) # Set directory to store recording file. From 83e7a166c18e7b20d83e05a6c050ff02bde9b74e Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 22:55:58 +0000 Subject: [PATCH 078/204] set recording channel info --- element_array_ephys/spike_sorting/si_spike_sorting.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index d09a2c6b..6bf3c4bd 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -165,11 +165,10 @@ def make(self, key): .fetch(format="frame") .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] ) - channels_details = ephys.get_recording_channels_details(key) - + # Create SI probe object si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) - si_probe.set_device_channel_indices(channels_details["channel_ind"]) + si_probe.set_device_channel_indices(range(len(electrodes_df))) si_recording.set_probe(probe=si_probe, in_place=True) # Run preprocessing and save results 
to output folder From 1d18a39cf22d81d3ce74aa591c5c8f4138895b01 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 22:57:55 +0000 Subject: [PATCH 079/204] fix loading preprocessor --- .../spike_sorting/si_preprocessing.py | 56 ++----------------- .../spike_sorting/si_spike_sorting.py | 4 +- 2 files changed, 5 insertions(+), 55 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_preprocessing.py b/element_array_ephys/spike_sorting/si_preprocessing.py index 2edf443d..07a49293 100644 --- a/element_array_ephys/spike_sorting/si_preprocessing.py +++ b/element_array_ephys/spike_sorting/si_preprocessing.py @@ -2,7 +2,7 @@ from spikeinterface import preprocessing -def mimic_catGT(recording): +def catGT(recording): recording = si.preprocessing.phase_shift(recording) recording = si.preprocessing.common_reference( recording, operator="median", reference="global" @@ -10,7 +10,7 @@ def mimic_catGT(recording): return recording -def mimic_IBLdestriping(recording): +def IBLdestriping(recording): # From International Brain Laboratory. “Spike sorting pipeline for the International Brain Laboratory”. 4 May 2022. 9 Jun 2022. 
recording = si.preprocessing.highpass_filter(recording, freq_min=400.0) bad_channel_ids, channel_labels = si.preprocessing.detect_bad_channels(recording) @@ -24,7 +24,7 @@ def mimic_IBLdestriping(recording): return recording -def mimic_IBLdestriping_modified(recording): +def IBLdestriping_modified(recording): # From SpikeInterface Implementation (https://spikeinterface.readthedocs.io/en/latest/how_to/analyse_neuropixels.html) recording = si.preprocessing.highpass_filter(recording, freq_min=400.0) bad_channel_ids, channel_labels = si.preprocessing.detect_bad_channels(recording) @@ -34,52 +34,4 @@ def mimic_IBLdestriping_modified(recording): recording = si.preprocessing.common_reference( recording, operator="median", reference="global" ) - return recording - - -preprocessing_function_mapping = { - "catGT": mimic_catGT, - "IBLdestriping": mimic_IBLdestriping, - "IBLdestriping_modified": mimic_IBLdestriping_modified, -} - - -## Example SI parameter set -""" -{'detect_threshold': 6, - 'projection_threshold': [10, 4], - 'preclust_threshold': 8, - 'car': True, - 'minFR': 0.02, - 'minfr_goodchannels': 0.1, - 'nblocks': 5, - 'sig': 20, - 'freq_min': 150, - 'sigmaMask': 30, - 'nPCs': 3, - 'ntbuff': 64, - 'nfilt_factor': 4, - 'NT': None, - 'do_correction': True, - 'wave_length': 61, - 'keep_good_only': False, - 'PreProcessing_params': {'Filter': False, - 'BandpassFilter': True, - 'HighpassFilter': False, - 'NotchFilter': False, - 'NormalizeByQuantile': False, - 'Scale': False, - 'Center': False, - 'ZScore': False, - 'Whiten': False, - 'CommonReference': False, - 'PhaseShift': False, - 'Rectify': False, - 'Clip': False, - 'BlankSaturation': False, - 'RemoveArtifacts': False, - 'RemoveBadChannels': False, - 'ZeroChannelPad': False, - 'DeepInterpolation': False, - 'Resample': False}} -""" + return recording \ No newline at end of file diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 
6bf3c4bd..c8d2f1b7 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -172,9 +172,7 @@ def make(self, key): si_recording.set_probe(probe=si_probe, in_place=True) # Run preprocessing and save results to output folder - si_preproc_func = si_preprocessing.preprocessing_function_mapping[ - params["SI_PREPROCESSING_METHOD"] - ] + si_preproc_func = getattr(si_preprocessing, params["SI_PREPROCESSING_METHOD"]) si_recording = si_preproc_func(si_recording) si_recording.dump_to_pickle(file_path=recording_file) From b0a863fec91f82a1171ae2c7041f76cf7dc612aa Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 16 Feb 2024 23:18:42 +0000 Subject: [PATCH 080/204] make all output dir non-sharable --- .../spike_sorting/si_spike_sorting.py | 40 +++++++++++-------- 1 file changed, 23 insertions(+), 17 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index c8d2f1b7..13f569e7 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -207,39 +207,35 @@ def make(self, key): ephys.ClusteringTask * ephys.ClusteringParamSet & key ).fetch1("clustering_method", "clustering_output_dir", "params") output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - recording_file = output_dir / "si_recording.pkl" - si_recording: si.BaseRecording = si.load_extractor(recording_file) # Get sorter method and create output directory. 
sorter_name = ( "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method ) - + recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" + si_recording: si.BaseRecording = si.load_extractor(recording_file) + # Run sorting @memoized_result( parameters={**key, **params}, - output_directory=output_dir / sorter_name, + output_directory=output_dir / sorter_name / "spike_sorting", ) def _run_sorter(*args, **kwargs): si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter(*args, **kwargs) - sorting_save_path = output_dir / sorter_name / "si_sorting.pkl" + sorting_save_path = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" si_sorting.dump_to_pickle(sorting_save_path) return sorting_save_path sorting_save_path = _run_sorter( sorter_name=sorter_name, recording=si_recording, - output_folder=output_dir / sorter_name, + output_folder=output_dir / sorter_name / "spike_sorting", remove_existing_folder=True, verbose=True, docker_image=True, **params.get("SI_SORTING_PARAMS", {}), ) - # Run sorting - sorting_save_path = output_dir / "si_sorting.pkl" - si_sorting.dump_to_pickle(sorting_save_path) - self.insert1( { **key, @@ -266,13 +262,20 @@ class PostProcessing(dj.Imported): def make(self, key): execution_time = datetime.utcnow() - # Load sorting & recording object. - output_dir, params = (ephys.ClusteringTask & key).fetch1( - "clustering_output_dir", "params" + # Load recording object. + clustering_method, output_dir, params = ( + ephys.ClusteringTask * ephys.ClusteringParamSet & key + ).fetch1("clustering_method", "clustering_output_dir", "params") + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + + # Get sorter method and create output directory. 
+ sorter_name = ( + "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method ) + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - recording_file = output_dir / "si_recording.pkl" - sorting_file = output_dir / "si_sorting.pkl" + recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" + sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" si_recording: si.BaseRecording = si.load_extractor(recording_file) si_sorting: si.sorters.BaseSorter = si.load_extractor(sorting_file) @@ -281,7 +284,7 @@ def make(self, key): we: si.WaveformExtractor = si.extract_waveforms( si_recording, si_sorting, - folder=output_dir / "waveform", # The folder where waveforms are cached + folder=output_dir / sorter_name / "waveform", # The folder where waveforms are cached max_spikes_per_unit=None, overwrite=True, **params.get("SI_WAVEFORM_EXTRACTION_PARAMS", {}), @@ -309,8 +312,11 @@ def make(self, key): waveform_extractor=we, **params.get("SI_QUALITY_METRICS_PARAMS", None) ) # Save the output (metrics.csv to the output dir) + metrics_output_dir = output_dir / sorter_name / "metrics" + metrics_output_dir.mkdir(parents=True, exist_ok=True) + metrics = si.qualitymetrics.compute_quality_metrics(waveform_extractor=we) - metrics.to_csv(output_dir / "metrics.csv") + metrics.to_csv(metrics_output_dir / "metrics.csv") # Save results self.insert1( From 7d28351baa629e1a72e0d9a52c67f0d6590af689 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Mon, 19 Feb 2024 15:30:30 -0600 Subject: [PATCH 081/204] refactor & accept changes from code review --- element_array_ephys/ephys_no_curation.py | 12 +++++----- .../spike_sorting/si_preprocessing.py | 2 +- .../spike_sorting/si_spike_sorting.py | 22 +++++++++++-------- 3 files changed, 20 insertions(+), 16 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 5894fe16..acf7c76f 100644 --- 
a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -989,7 +989,7 @@ def make(self, key): extremum_channel_inds=unit_peak_channel_map ) - # Get electrode info + # Get electrode info !#TODO: need to be modified electrode_config_key = ( EphysRecording * probe.ElectrodeConfig & key ).fetch1("KEY") @@ -1183,11 +1183,11 @@ def make(self, key): we: si.WaveformExtractor = si.load_waveforms( output_dir / "waveform", with_recording=False ) - unit_id_to_peak_channel_indices: dict[ - int, np.ndarray - ] = si.ChannelSparsity.from_best_channels( - we, 1, peak_sign="neg" - ).unit_id_to_channel_indices # {unit: peak_channel_index} + unit_id_to_peak_channel_indices: dict[int, np.ndarray] = ( + si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices + ) # {unit: peak_channel_index} units = (CuratedClustering.Unit & key).fetch("KEY", order_by="unit") diff --git a/element_array_ephys/spike_sorting/si_preprocessing.py b/element_array_ephys/spike_sorting/si_preprocessing.py index 07a49293..4db5f303 100644 --- a/element_array_ephys/spike_sorting/si_preprocessing.py +++ b/element_array_ephys/spike_sorting/si_preprocessing.py @@ -34,4 +34,4 @@ def IBLdestriping_modified(recording): recording = si.preprocessing.common_reference( recording, operator="median", reference="global" ) - return recording \ No newline at end of file + return recording diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 13f569e7..1b8366dc 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -26,7 +26,7 @@ import probeinterface as pi import spikeinterface as si from element_array_ephys import get_logger, probe, readers -from element_interface.utils import find_full_path, memoized_result +from element_interface.utils import find_full_path # , memoized_result from spikeinterface import 
exporters, postprocessing, qualitymetrics, sorters from . import si_preprocessing @@ -98,12 +98,12 @@ def make(self, key): clustering_method, acq_software, output_dir, params = ( ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key ).fetch1("clustering_method", "acq_software", "clustering_output_dir", "params") - + # Get sorter method and create output directory. sorter_name = ( "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method ) - + for required_key in ( "SI_SORTING_PARAMS", "SI_PREPROCESSING_METHOD", @@ -165,7 +165,7 @@ def make(self, key): .fetch(format="frame") .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] ) - + # Create SI probe object si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) si_probe.set_device_channel_indices(range(len(electrodes_df))) @@ -214,7 +214,7 @@ def make(self, key): ) recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" si_recording: si.BaseRecording = si.load_extractor(recording_file) - + # Run sorting @memoized_result( parameters={**key, **params}, @@ -222,7 +222,9 @@ def make(self, key): ) def _run_sorter(*args, **kwargs): si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter(*args, **kwargs) - sorting_save_path = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" + sorting_save_path = ( + output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" + ) si_sorting.dump_to_pickle(sorting_save_path) return sorting_save_path @@ -272,7 +274,7 @@ def make(self, key): sorter_name = ( "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method ) - + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" @@ -284,7 +286,9 @@ def make(self, key): we: si.WaveformExtractor = si.extract_waveforms( si_recording, si_sorting, - folder=output_dir / 
sorter_name / "waveform", # The folder where waveforms are cached + folder=output_dir + / sorter_name + / "waveform", # The folder where waveforms are cached max_spikes_per_unit=None, overwrite=True, **params.get("SI_WAVEFORM_EXTRACTION_PARAMS", {}), @@ -314,7 +318,7 @@ def make(self, key): # Save the output (metrics.csv to the output dir) metrics_output_dir = output_dir / sorter_name / "metrics" metrics_output_dir.mkdir(parents=True, exist_ok=True) - + metrics = si.qualitymetrics.compute_quality_metrics(waveform_extractor=we) metrics.to_csv(metrics_output_dir / "metrics.csv") From 95f5286704ca51a8768a1c8bad41d2ae2de94767 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Mon, 19 Feb 2024 16:50:15 -0600 Subject: [PATCH 082/204] remove memoized_result for testing --- .../spike_sorting/si_spike_sorting.py | 21 +++++++------------ 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 1b8366dc..0e3da684 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -26,7 +26,7 @@ import probeinterface as pi import spikeinterface as si from element_array_ephys import get_logger, probe, readers -from element_interface.utils import find_full_path # , memoized_result +from element_interface.utils import find_full_path from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from . 
import si_preprocessing @@ -216,19 +216,7 @@ def make(self, key): si_recording: si.BaseRecording = si.load_extractor(recording_file) # Run sorting - @memoized_result( - parameters={**key, **params}, - output_directory=output_dir / sorter_name / "spike_sorting", - ) - def _run_sorter(*args, **kwargs): - si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter(*args, **kwargs) - sorting_save_path = ( - output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" - ) - si_sorting.dump_to_pickle(sorting_save_path) - return sorting_save_path - - sorting_save_path = _run_sorter( + si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( sorter_name=sorter_name, recording=si_recording, output_folder=output_dir / sorter_name / "spike_sorting", @@ -238,6 +226,11 @@ def _run_sorter(*args, **kwargs): **params.get("SI_SORTING_PARAMS", {}), ) + sorting_save_path = ( + output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" + ) + si_sorting.dump_to_pickle(sorting_save_path) + self.insert1( { **key, From a7ebb9a61c6c2e90fd01d7f5529b8409b36889c3 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 20 Feb 2024 17:59:46 -0600 Subject: [PATCH 083/204] build: :heavy_plus_sign: add element-interface to required packages --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 532c72f6..204008a6 100644 --- a/setup.py +++ b/setup.py @@ -39,12 +39,12 @@ "scikit-image", "nbformat>=4.2.0", "pyopenephys>=1.1.6", + "element-interface @ git+https://github.com/datajoint/element-interface.git", ], extras_require={ "elements": [ "element-animal @ git+https://github.com/datajoint/element-animal.git", "element-event @ git+https://github.com/datajoint/element-event.git", - "element-interface @ git+https://github.com/datajoint/element-interface.git@dev_memoized_results", "element-lab @ git+https://github.com/datajoint/element-lab.git", "element-session @ git+https://github.com/datajoint/element-session.git", ], From 
134ff54eb124896f6fd70f5d335680ed7c2c9a06 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 20 Feb 2024 18:00:07 -0600 Subject: [PATCH 084/204] update pre-commit with the latest hooks --- .pre-commit-config.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0d513df7..6d28ef11 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ exclude: (^.github/|^docs/|^images/) repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v4.5.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -16,7 +16,7 @@ repos: # black - repo: https://github.com/psf/black - rev: 22.12.0 + rev: 24.2.0 hooks: - id: black - id: black-jupyter @@ -25,7 +25,7 @@ repos: # isort - repo: https://github.com/pycqa/isort - rev: 5.11.2 + rev: 5.13.2 hooks: - id: isort args: ["--profile", "black"] @@ -33,7 +33,7 @@ repos: # flake8 - repo: https://github.com/pycqa/flake8 - rev: 4.0.1 + rev: 7.0.0 hooks: - id: flake8 args: # arguments to configure flake8 From 79724268bd3b0c29f129b2fc3781577a931eb098 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 27 Feb 2024 17:25:37 -0600 Subject: [PATCH 085/204] build: :heavy_plus_sign: Add numba as required package --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 204008a6..52cd38b1 100644 --- a/setup.py +++ b/setup.py @@ -40,6 +40,7 @@ "nbformat>=4.2.0", "pyopenephys>=1.1.6", "element-interface @ git+https://github.com/datajoint/element-interface.git", + "numba", ], extras_require={ "elements": [ @@ -50,6 +51,6 @@ ], "nwb": ["dandi", "neuroconv[ecephys]", "pynwb"], "tests": ["pre-commit", "pytest", "pytest-cov"], - "spikingcircus": ["hdbscan", "numba"], + "spikingcircus": ["hdbscan"], }, ) From ed11526a00649cf6b1849802720c151a33beb374 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 27 Feb 2024 17:26:24 -0600 Subject: [PATCH 086/204] adjust extract_waveforms 
parameters --- element_array_ephys/spike_sorting/si_spike_sorting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 0e3da684..6df25de8 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -282,8 +282,8 @@ def make(self, key): folder=output_dir / sorter_name / "waveform", # The folder where waveforms are cached - max_spikes_per_unit=None, overwrite=True, + allow_unfiltered=True, **params.get("SI_WAVEFORM_EXTRACTION_PARAMS", {}), **params.get("SI_JOB_KWARGS", {"n_jobs": -1, "chunk_size": 30000}), ) From bab86b7c1471f02e2c9d95c1f4ee8442345b3817 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Wed, 28 Feb 2024 22:52:28 -0600 Subject: [PATCH 087/204] refactor: :recycle: update the output dir for CuratedClustering --- element_array_ephys/ephys_no_curation.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index acf7c76f..71648fec 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -967,22 +967,31 @@ class Unit(dj.Part): def make(self, key): """Automated population of Unit information.""" - output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") + clustering_method, output_dir = ( + ClusteringTask * ClusteringParamSet & key + ).fetch1("clustering_method", "clustering_output_dir") output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - if (output_dir / "waveform").exists(): # read from spikeinterface outputs + # Get sorter method and create output directory. 
+ sorter_name = ( + "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method + ) + waveform_dir = output_dir / sorter_name / "waveform" + sorting_dir = output_dir / sorter_name / "spike_sorting" + + if waveform_dir.exists(): # read from spikeinterface outputs we: si.WaveformExtractor = si.load_waveforms( - output_dir / "waveform", with_recording=False + waveform_dir, with_recording=False ) si_sorting: si.sorters.BaseSorter = si.load_extractor( - output_dir / "sorting.pkl" + sorting_dir / "si_sorting.pkl" ) unit_peak_channel_map: dict[int, int] = si.get_template_extremum_channel( we, outputs="index" ) # {unit: peak_channel_index} - spike_count_dict = dict[int, int] = si_sorting.count_num_spikes_per_unit() + spike_count_dict: dict[int, int] = si_sorting.count_num_spikes_per_unit() # {unit: spike_count} spikes = si_sorting.to_spike_vector( From 0898ce5cdaeb7656ed7142f395351dfcde652d40 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Thu, 29 Feb 2024 10:08:22 -0600 Subject: [PATCH 088/204] feat: :sparkles: add quality label mapping --- element_array_ephys/ephys_no_curation.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 71648fec..cb91baa9 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1011,6 +1011,21 @@ def make(self, key): zip(*electrode_query.fetch("channel", "electrode")) ) + # Get unit id to quality label mapping + cluster_quality_label_map = {} + try: + cluster_quality_label_map = pd.read_csv( + sorting_dir / "sorter_output" / "cluster_KSLabel.tsv", + delimiter="\t", + ) + cluster_quality_label_map: dict[ + int, str + ] = cluster_quality_label_map.set_index("cluster_id")[ + "KSLabel" + ].to_dict() # {unit: quality_label} + except FileNotFoundError: + pass + # Get channel to electrode mapping channel2depth_map = dict(zip(*electrode_query.fetch("channel", 
"y_coord"))) @@ -1038,7 +1053,9 @@ def make(self, key): units.append( { "unit": unit_id, - "cluster_quality_label": "n.a.", + "cluster_quality_label": cluster_quality_label_map.get( + unit_id, "n.a." + ), "spike_times": si_sorting.get_unit_spike_train( unit_id, return_times=True ), From 727af24cca6f00fc35651f88f7f258d2ab67e43e Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 10:19:09 -0600 Subject: [PATCH 089/204] feat: :sparkles: Ingest EphysRecording.Channel --- element_array_ephys/ephys_no_curation.py | 242 +++++++++++++---------- 1 file changed, 133 insertions(+), 109 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index cb91baa9..e9adc290 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -8,7 +8,9 @@ import datajoint as dj import numpy as np import pandas as pd +import spikeinterface as si from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory +from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from . import ephys_report, probe from .readers import kilosort, openephys, spikeglx @@ -19,9 +21,6 @@ _linking_module = None -import spikeinterface as si -from spikeinterface import exporters, postprocessing, qualitymetrics, sorters - def activate( ephys_schema_name: str, @@ -327,129 +326,154 @@ def make(self, key): break else: raise FileNotFoundError( - "Ephys recording data not found!" + f"Ephys recording data not found! for {key}." "Neither SpikeGLX nor Open Ephys recording files found" ) - supported_probe_types = probe.ProbeType.fetch("probe_type") + if acq_software not in AcquisitionSoftware.fetch("acq_software"): + raise NotImplementedError( + f"Processing ephys files from acquisition software of type {acq_software} is not yet implemented." 
+ ) - if acq_software == "SpikeGLX": - for meta_filepath in ephys_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format(key) - ) + else: + supported_probe_types = probe.ProbeType.fetch("probe_type") + + if acq_software == "SpikeGLX": + for meta_filepath in ephys_meta_filepaths: + spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) + if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: + break + else: + raise FileNotFoundError( + "No SpikeGLX data found for probe insertion: {}".format(key) + ) + + if spikeglx_meta.probe_model in supported_probe_types: + probe_type = spikeglx_meta.probe_model + electrode_query = probe.ProbeType.Electrode & { + "probe_type": probe_type + } - if spikeglx_meta.probe_model in supported_probe_types: - probe_type = spikeglx_meta.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} + probe_electrodes = { + (shank, shank_col, shank_row): key + for key, shank, shank_col, shank_row in zip( + *electrode_query.fetch( + "KEY", "shank", "shank_col", "shank_row" + ) + ) + } - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") + electrode_group_members = [ + probe_electrodes[(shank, shank_col, shank_row)] + for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap[ + "data" + ] + ] + else: + raise NotImplementedError( + "Processing for neuropixels probe model" + " {} not yet implemented".format(spikeglx_meta.probe_model) ) - } - electrode_group_members = [ - probe_electrodes[(shank, shank_col, shank_row)] - for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels probe model" - " {} not yet 
implemented".format(spikeglx_meta.probe_model) + self.insert1( + { + **key, + **generate_electrode_config( + probe_type, electrode_group_members + ), + "acq_software": acq_software, + "sampling_rate": spikeglx_meta.meta["imSampRate"], + "recording_datetime": spikeglx_meta.recording_time, + "recording_duration": ( + spikeglx_meta.recording_duration + or spikeglx.retrieve_recording_duration(meta_filepath) + ), + } ) - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": spikeglx_meta.meta["imSampRate"], - "recording_datetime": spikeglx_meta.recording_time, - "recording_duration": ( - spikeglx_meta.recording_duration - or spikeglx.retrieve_recording_duration(meta_filepath) - ), - } - ) - - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) - self.EphysFile.insert1( - {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} - ) - elif acq_software == "Open Ephys": - dataset = openephys.OpenEphys(session_dir) - for serial_number, probe_data in dataset.probes.items(): - if str(serial_number) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No Open Ephys data found for probe insertion: {}".format(key) + root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) + self.EphysFile.insert1( + {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} ) + elif acq_software == "Open Ephys": + dataset = openephys.OpenEphys(session_dir) + for serial_number, probe_data in dataset.probes.items(): + if str(serial_number) == inserted_probe_serial_number: + break + else: + raise FileNotFoundError( + "No Open Ephys data found for probe insertion: {}".format(key) + ) - if not probe_data.ap_meta: - raise IOError( - 'No analog signals found - check "structure.oebin" file or "continuous" directory' - ) + if not probe_data.ap_meta: + raise IOError( + 'No analog signals found - check "structure.oebin" file or 
"continuous" directory' + ) - if probe_data.probe_model in supported_probe_types: - probe_type = probe_data.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} + if probe_data.probe_model in supported_probe_types: + probe_type = probe_data.probe_model + electrode_query = probe.ProbeType.Electrode & { + "probe_type": probe_type + } - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } + probe_electrodes = { + key["electrode"]: key for key in electrode_query.fetch("KEY") + } - electrode_group_members = [ - probe_electrodes[channel_idx] - for channel_idx in probe_data.ap_meta["channels_indices"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels" - " probe model {} not yet implemented".format(probe_data.probe_model) + electrode_group_members = [ + probe_electrodes[channel_idx] + for channel_idx in probe_data.ap_meta["channels_indices"] + ] + else: + raise NotImplementedError( + "Processing for neuropixels" + " probe model {} not yet implemented".format( + probe_data.probe_model + ) + ) + + self.insert1( + { + **key, + **generate_electrode_config( + probe_type, electrode_group_members + ), + "acq_software": acq_software, + "sampling_rate": probe_data.ap_meta["sample_rate"], + "recording_datetime": probe_data.recording_info[ + "recording_datetimes" + ][0], + "recording_duration": np.sum( + probe_data.recording_info["recording_durations"] + ), + } ) - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": probe_data.ap_meta["sample_rate"], - "recording_datetime": probe_data.recording_info[ - "recording_datetimes" - ][0], - "recording_duration": np.sum( - probe_data.recording_info["recording_durations"] - ), - } - ) + root_dir = find_root_directory( + get_ephys_root_data_dir(), + probe_data.recording_info["recording_files"][0], + ) + self.EphysFile.insert( + [ + {**key, "file_path": 
fp.relative_to(root_dir).as_posix()} + for fp in probe_data.recording_info["recording_files"] + ] + ) + # Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough + del probe_data, dataset + gc.collect() - root_dir = find_root_directory( - get_ephys_root_data_dir(), - probe_data.recording_info["recording_files"][0], + # Insert channel information + # Get channel and electrode-site mapping + channel2electrodes = get_neuropixels_channel2electrode_map( + key, acq_software ) - self.EphysFile.insert( + self.Channel.insert( [ - {**key, "file_path": fp.relative_to(root_dir).as_posix()} - for fp in probe_data.recording_info["recording_files"] + {**key, "channel_idx": channel_idx, **channel_info} + for channel_idx, channel_info in channel2electrodes.items() ] ) - # explicitly garbage collect "dataset" - # as these may have large memory footprint and may not be cleared fast enough - del probe_data, dataset - gc.collect() - else: - raise NotImplementedError( - f"Processing ephys files from" - f" acquisition software of type {acq_software} is" - f" not yet implemented" - ) @schema @@ -1209,11 +1233,11 @@ def make(self, key): we: si.WaveformExtractor = si.load_waveforms( output_dir / "waveform", with_recording=False ) - unit_id_to_peak_channel_indices: dict[int, np.ndarray] = ( - si.ChannelSparsity.from_best_channels( - we, 1, peak_sign="neg" - ).unit_id_to_channel_indices - ) # {unit: peak_channel_index} + unit_id_to_peak_channel_indices: dict[ + int, np.ndarray + ] = si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices # {unit: peak_channel_index} units = (CuratedClustering.Unit & key).fetch("KEY", order_by="unit") From 1df41ea6ee8668c5b3ca5cb970dad024f65a668c Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 11:05:05 -0600 Subject: [PATCH 090/204] get channel to electrode mapping in CuratedClustering --- element_array_ephys/ephys_no_curation.py | 30 
+++++++++++++----------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index e9adc290..c313684f 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -9,7 +9,8 @@ import numpy as np import pandas as pd import spikeinterface as si -from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory +from element_interface.utils import (dict_to_uuid, find_full_path, + find_root_directory) from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from . import ephys_report, probe @@ -1022,7 +1023,7 @@ def make(self, key): extremum_channel_inds=unit_peak_channel_map ) - # Get electrode info !#TODO: need to be modified + # Get electrode & channel info electrode_config_key = ( EphysRecording * probe.ElectrodeConfig & key ).fetch1("KEY") @@ -1030,10 +1031,11 @@ def make(self, key): electrode_query = ( probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode & electrode_config_key - ) + ) * (dj.U("electrode", "channel_idx") & EphysRecording.Channel) + channel2electrode_map = dict( - zip(*electrode_query.fetch("channel", "electrode")) - ) + zip(*electrode_query.fetch("channel_idx", "electrode")) + ) # {channel: electrode} # Get unit id to quality label mapping cluster_quality_label_map = {} @@ -1051,24 +1053,24 @@ def make(self, key): pass # Get channel to electrode mapping - channel2depth_map = dict(zip(*electrode_query.fetch("channel", "y_coord"))) + channel2depth_map = dict(zip(*electrode_query.fetch("channel_idx", "y_coord"))) # {channel: depth} peak_electrode_ind = np.array( [ channel2electrode_map[unit_peak_channel_map[unit_id]] for unit_id in si_sorting.unit_ids ] - ) + ) # get the electrode where peak unit activity is recorded # Get channel to depth mapping - electrode_depth_ind = np.array( + channel_depth_ind = np.array( [ channel2depth_map[unit_peak_channel_map[unit_id]] for 
unit_id in si_sorting.unit_ids ] ) spikes["electrode"] = peak_electrode_ind[spikes["unit_index"]] - spikes["depth"] = electrode_depth_ind[spikes["unit_index"]] + spikes["depth"] = channel_depth_ind[spikes["unit_index"]] units = [] @@ -1233,11 +1235,11 @@ def make(self, key): we: si.WaveformExtractor = si.load_waveforms( output_dir / "waveform", with_recording=False ) - unit_id_to_peak_channel_indices: dict[ - int, np.ndarray - ] = si.ChannelSparsity.from_best_channels( - we, 1, peak_sign="neg" - ).unit_id_to_channel_indices # {unit: peak_channel_index} + unit_id_to_peak_channel_indices: dict[int, np.ndarray] = ( + si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices + ) # {unit: peak_channel_index} units = (CuratedClustering.Unit & key).fetch("KEY", order_by="unit") From 417219fd4baeeadc1d9e4feeaa29ef04c4b21555 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 12:10:29 -0600 Subject: [PATCH 091/204] refactor: :recycle: Fix metrics directory in QualityMetrics --- element_array_ephys/ephys_no_curation.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index c313684f..70525451 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -9,8 +9,7 @@ import numpy as np import pandas as pd import spikeinterface as si -from element_interface.utils import (dict_to_uuid, find_full_path, - find_root_directory) +from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from . 
import ephys_report, probe @@ -1032,7 +1031,7 @@ def make(self, key): probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode & electrode_config_key ) * (dj.U("electrode", "channel_idx") & EphysRecording.Channel) - + channel2electrode_map = dict( zip(*electrode_query.fetch("channel_idx", "electrode")) ) # {channel: electrode} @@ -1053,7 +1052,9 @@ def make(self, key): pass # Get channel to electrode mapping - channel2depth_map = dict(zip(*electrode_query.fetch("channel_idx", "y_coord"))) # {channel: depth} + channel2depth_map = dict( + zip(*electrode_query.fetch("channel_idx", "y_coord")) + ) # {channel: depth} peak_electrode_ind = np.array( [ @@ -1570,9 +1571,14 @@ class Waveform(dj.Part): def make(self, key): """Populates tables with quality metrics data.""" # Load metrics.csv - output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") + clustering_method, output_dir = ( + ClusteringTask * ClusteringParamSet & key + ).fetch1("clustering_method", "clustering_output_dir") output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - metric_fp = output_dir / "metrics.csv" + sorter_name = ( + "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method + ) + metric_fp = output_dir / sorter_name / "metrics" / "metrics.csv" if not metric_fp.exists(): raise FileNotFoundError(f"QC metrics file not found: {metric_fp}") metrics_df = pd.read_csv(metric_fp) From f70ae4ee1e294b0ba1173fa6a9b1255e2d27f6b3 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 13:06:22 -0600 Subject: [PATCH 092/204] feat: :sparkles: replace get_neuropixels_channel2electrode_map with channel_info --- element_array_ephys/ephys_no_curation.py | 54 ++++++++++++++++-------- 1 file changed, 36 insertions(+), 18 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 70525451..5730ebf3 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1096,7 
+1096,7 @@ def make(self, key): } ) - else: + else: # read from kilosort outputs kilosort_dataset = kilosort.Kilosort(output_dir) acq_software, sample_rate = (EphysRecording & key).fetch1( "acq_software", "sampling_rate" @@ -1131,14 +1131,19 @@ def make(self, key): kilosort_dataset.extract_spike_depths() # Get channel and electrode-site mapping - channel2electrodes = get_neuropixels_channel2electrode_map( - key, acq_software + channel_info = ( + (EphysRecording.Channel & key) + .proj(..., "-channel_name") + .fetch(as_dict=True, order_by="channel_idx") ) + channel_info: dict[int, dict] = { + ch.pop("channel_idx"): ch for ch in channel_info + } # e.g., {0: {'subject': 'sglx', 'session_id': 912231859, 'insertion_number': 1, 'electrode_config_hash': UUID('8d4cc6d8-a02d-42c8-bf27-7459c39ea0ee'), 'probe_type': 'neuropixels 1.0 - 3A', 'electrode': 0}} # -- Spike-sites and Spike-depths -- spike_sites = np.array( [ - channel2electrodes[s]["electrode"] + channel_info[s]["electrode"] for s in kilosort_dataset.data["spike_sites"] ] ) @@ -1157,9 +1162,10 @@ def make(self, key): units.append( { + **key, "unit": unit, "cluster_quality_label": unit_lbl, - **channel2electrodes[unit_channel], + **channel_info[unit_channel], "spike_times": unit_spike_times, "spike_count": spike_count, "spike_sites": spike_sites[ @@ -1228,13 +1234,21 @@ class Waveform(dj.Part): def make(self, key): """Populates waveform tables.""" - output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") + clustering_method, output_dir = ( + ClusteringTask * ClusteringParamSet & key + ).fetch1("clustering_method", "clustering_output_dir") output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) + sorter_name = ( + "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method + ) - if (output_dir / "waveform").exists(): # read from spikeinterface outputs + if ( + output_dir / sorter_name / "waveform" + ).exists(): # read from spikeinterface outputs + waveform_dir = output_dir / 
sorter_name / "waveform" we: si.WaveformExtractor = si.load_waveforms( - output_dir / "waveform", with_recording=False + waveform_dir, with_recording=False ) unit_id_to_peak_channel_indices: dict[int, np.ndarray] = ( si.ChannelSparsity.from_best_channels( @@ -1299,11 +1313,15 @@ def make(self, key): EphysRecording * ProbeInsertion & key ).fetch1("acq_software", "probe") - # -- Get channel and electrode-site mapping - recording_key = (EphysRecording & key).fetch1("KEY") - channel2electrodes = get_neuropixels_channel2electrode_map( - recording_key, acq_software + # Get channel and electrode-site mapping + channel_info = ( + (EphysRecording.Channel & key) + .proj(..., "-channel_name") + .fetch(as_dict=True, order_by="channel_idx") ) + channel_info: dict[int, dict] = { + ch.pop("channel_idx"): ch for ch in channel_info + } # e.g., {0: {'subject': 'sglx', 'session_id': 912231859, 'insertion_number': 1, 'electrode_config_hash': UUID('8d4cc6d8-a02d-42c8-bf27-7459c39ea0ee'), 'probe_type': 'neuropixels 1.0 - 3A', 'electrode': 0}} # Get all units units = { @@ -1335,12 +1353,12 @@ def yield_unit_waveforms(): unit_electrode_waveforms.append( { **units[unit_no], - **channel2electrodes[channel], + **channel_info[channel], "waveform_mean": channel_waveform, } ) if ( - channel2electrodes[channel]["electrode"] + channel_info[channel]["electrode"] == units[unit_no]["electrode"] ): unit_peak_waveform = { @@ -1377,12 +1395,12 @@ def yield_unit_waveforms(): unit_electrode_waveforms.append( { **units[unit_no], - **channel2electrodes[channel], + **channel_info[channel], "waveform_mean": channel_waveform, } ) if ( - channel2electrodes[channel]["electrode"] + channel_info[channel]["electrode"] == units[unit_no]["electrode"] ): unit_peak_waveform = { @@ -1451,13 +1469,13 @@ def yield_unit_waveforms(): unit_electrode_waveforms.append( { **unit_dict, - **channel2electrodes[channel], + **channel_info[channel], "waveform_mean": channel_waveform.mean(axis=0), "waveforms": channel_waveform, } ) 
if ( - channel2electrodes[channel]["electrode"] + channel_info[channel]["electrode"] == unit_dict["electrode"] ): unit_peak_waveform = { From 0ccbec962b5a11107d8fe28906e79bf19c693475 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 21:59:06 +0000 Subject: [PATCH 093/204] fix CuratedClustering make function --- element_array_ephys/ephys_no_curation.py | 36 +++++++++++++++++------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 5730ebf3..6ddd8fec 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1032,6 +1032,12 @@ def make(self, key): & electrode_config_key ) * (dj.U("electrode", "channel_idx") & EphysRecording.Channel) + channel_info = electrode_query.fetch(as_dict=True, order_by="channel_idx") + + channel_info: dict[int, dict] = { + ch.pop("channel_idx"): ch for ch in channel_info + } + channel2electrode_map = dict( zip(*electrode_query.fetch("channel_idx", "electrode")) ) # {channel: electrode} @@ -1058,7 +1064,7 @@ def make(self, key): peak_electrode_ind = np.array( [ - channel2electrode_map[unit_peak_channel_map[unit_id]] + channel_info[unit_peak_channel_map[unit_id]]["electrode"] for unit_id in si_sorting.unit_ids ] ) # get the electrode where peak unit activity is recorded @@ -1066,19 +1072,29 @@ def make(self, key): # Get channel to depth mapping channel_depth_ind = np.array( [ - channel2depth_map[unit_peak_channel_map[unit_id]] + channel_info[unit_peak_channel_map[unit_id]]["y_coord"] for unit_id in si_sorting.unit_ids ] ) - spikes["electrode"] = peak_electrode_ind[spikes["unit_index"]] - spikes["depth"] = channel_depth_ind[spikes["unit_index"]] + + # Assign electrode and depth for each spike + new_spikes = np.empty(spikes.shape, spikes.dtype.descr + [('electrode', ' Date: Fri, 1 Mar 2024 23:11:25 +0000 Subject: [PATCH 094/204] improve try except logic --- 
element_array_ephys/ephys_no_curation.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 6ddd8fec..b8c22f05 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1043,25 +1043,20 @@ def make(self, key): ) # {channel: electrode} # Get unit id to quality label mapping - cluster_quality_label_map = {} try: cluster_quality_label_map = pd.read_csv( sorting_dir / "sorter_output" / "cluster_KSLabel.tsv", delimiter="\t", ) + except FileNotFoundError: + cluster_quality_label_map = {} + else: cluster_quality_label_map: dict[ int, str ] = cluster_quality_label_map.set_index("cluster_id")[ "KSLabel" ].to_dict() # {unit: quality_label} - except FileNotFoundError: - pass - - # Get channel to electrode mapping - channel2depth_map = dict( - zip(*electrode_query.fetch("channel_idx", "y_coord")) - ) # {channel: depth} - + peak_electrode_ind = np.array( [ channel_info[unit_peak_channel_map[unit_id]]["electrode"] From 673faedc1c8232098888340bab687896cd89e7f1 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 23:14:45 +0000 Subject: [PATCH 095/204] docs: :memo: update comments in ephys_no_curation --- element_array_ephys/ephys_no_curation.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index b8c22f05..f4ba5b29 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -286,10 +286,10 @@ class EphysRecording(dj.Imported): class Channel(dj.Part): definition = """ -> master - channel_idx: int # channel index + channel_idx: int # channel index (index of the raw data) --- -> probe.ElectrodeConfig.Electrode - channel_name="": varchar(64) + channel_name="": varchar(64) # alias of the channel """ class EphysFile(dj.Part): @@ -1033,14 +1033,9 @@ 
def make(self, key): ) * (dj.U("electrode", "channel_idx") & EphysRecording.Channel) channel_info = electrode_query.fetch(as_dict=True, order_by="channel_idx") - channel_info: dict[int, dict] = { ch.pop("channel_idx"): ch for ch in channel_info } - - channel2electrode_map = dict( - zip(*electrode_query.fetch("channel_idx", "electrode")) - ) # {channel: electrode} # Get unit id to quality label mapping try: @@ -1056,15 +1051,16 @@ def make(self, key): ] = cluster_quality_label_map.set_index("cluster_id")[ "KSLabel" ].to_dict() # {unit: quality_label} - + + # Get electrode where peak unit activity is recorded peak_electrode_ind = np.array( [ channel_info[unit_peak_channel_map[unit_id]]["electrode"] for unit_id in si_sorting.unit_ids ] - ) # get the electrode where peak unit activity is recorded + ) - # Get channel to depth mapping + # Get channel depth channel_depth_ind = np.array( [ channel_info[unit_peak_channel_map[unit_id]]["y_coord"] @@ -1707,7 +1703,7 @@ def get_openephys_probe_data(ephys_recording_key: dict) -> list: def get_neuropixels_channel2electrode_map( ephys_recording_key: dict, acq_software: str -) -> dict: +) -> dict: #TODO: remove this function """Get the channel map for neuropixels probe.""" if acq_software == "SpikeGLX": spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key) From 8ab4c58a9d5c744bc3c071f31f360873ff8ee8a2 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 23:15:21 +0000 Subject: [PATCH 096/204] refactor: :recycle: improve if else block in EphysRecording --- element_array_ephys/ephys_no_curation.py | 237 +++++++++++------------ 1 file changed, 118 insertions(+), 119 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index f4ba5b29..5d77a041 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -335,145 +335,144 @@ def make(self, key): f"Processing ephys files from acquisition software of type 
{acq_software} is not yet implemented." ) - else: - supported_probe_types = probe.ProbeType.fetch("probe_type") - - if acq_software == "SpikeGLX": - for meta_filepath in ephys_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format(key) - ) + supported_probe_types = probe.ProbeType.fetch("probe_type") - if spikeglx_meta.probe_model in supported_probe_types: - probe_type = spikeglx_meta.probe_model - electrode_query = probe.ProbeType.Electrode & { - "probe_type": probe_type - } + if acq_software == "SpikeGLX": + for meta_filepath in ephys_meta_filepaths: + spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) + if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: + break + else: + raise FileNotFoundError( + "No SpikeGLX data found for probe insertion: {}".format(key) + ) - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch( - "KEY", "shank", "shank_col", "shank_row" - ) - ) - } + if spikeglx_meta.probe_model in supported_probe_types: + probe_type = spikeglx_meta.probe_model + electrode_query = probe.ProbeType.Electrode & { + "probe_type": probe_type + } - electrode_group_members = [ - probe_electrodes[(shank, shank_col, shank_row)] - for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap[ - "data" - ] - ] - else: - raise NotImplementedError( - "Processing for neuropixels probe model" - " {} not yet implemented".format(spikeglx_meta.probe_model) + probe_electrodes = { + (shank, shank_col, shank_row): key + for key, shank, shank_col, shank_row in zip( + *electrode_query.fetch( + "KEY", "shank", "shank_col", "shank_row" + ) ) + } - self.insert1( - { - **key, - **generate_electrode_config( - probe_type, electrode_group_members - ), - "acq_software": acq_software, - "sampling_rate": 
spikeglx_meta.meta["imSampRate"], - "recording_datetime": spikeglx_meta.recording_time, - "recording_duration": ( - spikeglx_meta.recording_duration - or spikeglx.retrieve_recording_duration(meta_filepath) - ), - } + electrode_group_members = [ + probe_electrodes[(shank, shank_col, shank_row)] + for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap[ + "data" + ] + ] + else: + raise NotImplementedError( + "Processing for neuropixels probe model" + " {} not yet implemented".format(spikeglx_meta.probe_model) ) - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) - self.EphysFile.insert1( - {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} + self.insert1( + { + **key, + **generate_electrode_config( + probe_type, electrode_group_members + ), + "acq_software": acq_software, + "sampling_rate": spikeglx_meta.meta["imSampRate"], + "recording_datetime": spikeglx_meta.recording_time, + "recording_duration": ( + spikeglx_meta.recording_duration + or spikeglx.retrieve_recording_duration(meta_filepath) + ), + } + ) + + root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) + self.EphysFile.insert1( + {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} + ) + elif acq_software == "Open Ephys": + dataset = openephys.OpenEphys(session_dir) + for serial_number, probe_data in dataset.probes.items(): + if str(serial_number) == inserted_probe_serial_number: + break + else: + raise FileNotFoundError( + "No Open Ephys data found for probe insertion: {}".format(key) ) - elif acq_software == "Open Ephys": - dataset = openephys.OpenEphys(session_dir) - for serial_number, probe_data in dataset.probes.items(): - if str(serial_number) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No Open Ephys data found for probe insertion: {}".format(key) - ) - if not probe_data.ap_meta: - raise IOError( - 'No analog signals found - check "structure.oebin" file or "continuous" directory' - ) + 
if not probe_data.ap_meta: + raise IOError( + 'No analog signals found - check "structure.oebin" file or "continuous" directory' + ) - if probe_data.probe_model in supported_probe_types: - probe_type = probe_data.probe_model - electrode_query = probe.ProbeType.Electrode & { - "probe_type": probe_type - } + if probe_data.probe_model in supported_probe_types: + probe_type = probe_data.probe_model + electrode_query = probe.ProbeType.Electrode & { + "probe_type": probe_type + } - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } + probe_electrodes = { + key["electrode"]: key for key in electrode_query.fetch("KEY") + } - electrode_group_members = [ - probe_electrodes[channel_idx] - for channel_idx in probe_data.ap_meta["channels_indices"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels" - " probe model {} not yet implemented".format( - probe_data.probe_model - ) + electrode_group_members = [ + probe_electrodes[channel_idx] + for channel_idx in probe_data.ap_meta["channels_indices"] + ] + else: + raise NotImplementedError( + "Processing for neuropixels" + " probe model {} not yet implemented".format( + probe_data.probe_model ) - - self.insert1( - { - **key, - **generate_electrode_config( - probe_type, electrode_group_members - ), - "acq_software": acq_software, - "sampling_rate": probe_data.ap_meta["sample_rate"], - "recording_datetime": probe_data.recording_info[ - "recording_datetimes" - ][0], - "recording_duration": np.sum( - probe_data.recording_info["recording_durations"] - ), - } ) - root_dir = find_root_directory( - get_ephys_root_data_dir(), - probe_data.recording_info["recording_files"][0], - ) - self.EphysFile.insert( - [ - {**key, "file_path": fp.relative_to(root_dir).as_posix()} - for fp in probe_data.recording_info["recording_files"] - ] - ) - # Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough - del probe_data, dataset - gc.collect() 
+ self.insert1( + { + **key, + **generate_electrode_config( + probe_type, electrode_group_members + ), + "acq_software": acq_software, + "sampling_rate": probe_data.ap_meta["sample_rate"], + "recording_datetime": probe_data.recording_info[ + "recording_datetimes" + ][0], + "recording_duration": np.sum( + probe_data.recording_info["recording_durations"] + ), + } + ) - # Insert channel information - # Get channel and electrode-site mapping - channel2electrodes = get_neuropixels_channel2electrode_map( - key, acq_software + root_dir = find_root_directory( + get_ephys_root_data_dir(), + probe_data.recording_info["recording_files"][0], ) - self.Channel.insert( + self.EphysFile.insert( [ - {**key, "channel_idx": channel_idx, **channel_info} - for channel_idx, channel_info in channel2electrodes.items() + {**key, "file_path": fp.relative_to(root_dir).as_posix()} + for fp in probe_data.recording_info["recording_files"] ] ) + # Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough + del probe_data, dataset + gc.collect() + + # Insert channel information + # Get channel and electrode-site mapping + channel2electrodes = get_neuropixels_channel2electrode_map( + key, acq_software + ) + self.Channel.insert( + [ + {**key, "channel_idx": channel_idx, **channel_info} + for channel_idx, channel_info in channel2electrodes.items() + ] + ) @schema From 226142b82614f4964a3f7c8655ffd2ca6f43dd3d Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 1 Mar 2024 23:52:16 +0000 Subject: [PATCH 097/204] feat: :sparkles: Update WaveformSet make function --- element_array_ephys/ephys_no_curation.py | 47 ++++++++---------------- 1 file changed, 16 insertions(+), 31 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 5d77a041..a721e5b6 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1248,6 +1248,16 @@ def make(self, key): 
"kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method ) + # Get channel and electrode-site mapping + channel_info = ( + (EphysRecording.Channel & key) + .proj(..., "-channel_name") + .fetch(as_dict=True, order_by="channel_idx") + ) + channel_info: dict[int, dict] = { + ch.pop("channel_idx"): ch for ch in channel_info + } # e.g., {0: {'subject': 'sglx', 'session_id': 912231859, 'insertion_number': 1, 'electrode_config_hash': UUID('8d4cc6d8-a02d-42c8-bf27-7459c39ea0ee'), 'probe_type': 'neuropixels 1.0 - 3A', 'electrode': 0}} + if ( output_dir / sorter_name / "waveform" ).exists(): # read from spikeinterface outputs @@ -1256,27 +1266,12 @@ def make(self, key): we: si.WaveformExtractor = si.load_waveforms( waveform_dir, with_recording=False ) - unit_id_to_peak_channel_indices: dict[int, np.ndarray] = ( + unit_id_to_peak_channel_map: dict[int, np.ndarray] = ( si.ChannelSparsity.from_best_channels( we, 1, peak_sign="neg" ).unit_id_to_channel_indices ) # {unit: peak_channel_index} - units = (CuratedClustering.Unit & key).fetch("KEY", order_by="unit") - - # Get electrode info - electrode_config_key = ( - EphysRecording * probe.ElectrodeConfig & key - ).fetch1("KEY") - - electrode_query = ( - probe.ProbeType.Electrode.proj() * probe.ElectrodeConfig.Electrode - & electrode_config_key - ) - electrode_info = electrode_query.fetch( - "KEY", order_by="electrode", as_dict=True - ) - # Get mean waveform for each unit from all channels mean_waveforms = we.get_all_templates( mode="average" @@ -1285,13 +1280,13 @@ def make(self, key): unit_peak_waveform = [] unit_electrode_waveforms = [] - for unit in units: + for unit in (CuratedClustering.Unit & key).fetch("KEY", order_by="unit"): unit_peak_waveform.append( { **unit, "peak_electrode_waveform": we.get_template( unit_id=unit["unit"], mode="average", force_dense=True - )[:, unit_id_to_peak_channel_indices[unit["unit"]][0]], + )[:, unit_id_to_peak_channel_map[unit["unit"]][0]], } ) @@ -1299,12 +1294,12 @@ def 
make(self, key): [ { **unit, - **e, + **channel_info[c], "waveform_mean": mean_waveforms[ - unit["unit"], :, e["electrode"] + unit["unit"] - 1, :, c ], } - for e in electrode_info + for c in channel_info ] ) @@ -1319,16 +1314,6 @@ def make(self, key): EphysRecording * ProbeInsertion & key ).fetch1("acq_software", "probe") - # Get channel and electrode-site mapping - channel_info = ( - (EphysRecording.Channel & key) - .proj(..., "-channel_name") - .fetch(as_dict=True, order_by="channel_idx") - ) - channel_info: dict[int, dict] = { - ch.pop("channel_idx"): ch for ch in channel_info - } # e.g., {0: {'subject': 'sglx', 'session_id': 912231859, 'insertion_number': 1, 'electrode_config_hash': UUID('8d4cc6d8-a02d-42c8-bf27-7459c39ea0ee'), 'probe_type': 'neuropixels 1.0 - 3A', 'electrode': 0}} - # Get all units units = { u["unit"]: u From ea398391223d6ce8ac26ea22efc2240c86a95525 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 5 Mar 2024 09:24:14 -0600 Subject: [PATCH 098/204] refactor: :fire: remove & get_neuropixels_channel2electrode_map and generate_electrode_config --- element_array_ephys/ephys_no_curation.py | 305 +++++++++++------------ 1 file changed, 146 insertions(+), 159 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index a721e5b6..26608997 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -315,7 +315,7 @@ def make(self, key): "probe" ) - # search session dir and determine acquisition software + # Search session dir and determine acquisition software for ephys_pattern, ephys_acq_type in ( ("*.ap.meta", "SpikeGLX"), ("*.oebin", "Open Ephys"), @@ -338,62 +338,117 @@ def make(self, key): supported_probe_types = probe.ProbeType.fetch("probe_type") if acq_software == "SpikeGLX": - for meta_filepath in ephys_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - break 
- else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format(key) - ) + spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) + spikeglx_meta = spikeglx.SpikeGLXMeta(spikeglx_meta_filepath) - if spikeglx_meta.probe_model in supported_probe_types: + if spikeglx_meta.probe_model not in supported_probe_types: + raise NotImplementedError( + f"Processing for neuropixels probe model {spikeglx_meta.probe_model} not yet implemented." + ) + else: probe_type = spikeglx_meta.probe_model - electrode_query = probe.ProbeType.Electrode & { - "probe_type": probe_type - } + electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} probe_electrodes = { (shank, shank_col, shank_row): key for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch( - "KEY", "shank", "shank_col", "shank_row" - ) + *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") ) - } - + } # electrode configuration electrode_group_members = [ probe_electrodes[(shank, shank_col, shank_row)] - for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap[ - "data" + for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] + ] # recording session-specific electrode configuration + + # Compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) + electrode_config_hash = dict_to_uuid( + {k["electrode"]: k for k in electrode_group_members} + ) + + electrode_list = sorted( + [k["electrode"] for k in electrode_group_members] + ) + electrode_gaps = ( + [-1] + + np.where(np.diff(electrode_list) > 1)[0].tolist() + + [len(electrode_list) - 1] + ) + electrode_config_name = "; ".join( + [ + f"{electrode_list[start + 1]}-{electrode_list[end]}" + for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) ] - ] - else: - raise NotImplementedError( - "Processing for neuropixels probe model" - " {} not yet implemented".format(spikeglx_meta.probe_model) ) + electrode_config_key = {"electrode_config_hash": 
electrode_config_hash} + + # Insert into ElectrodeConfig + if not probe.ElectrodeConfig & electrode_config_key: + probe.ElectrodeConfig.insert1( + { + **electrode_config_key, + "probe_type": probe_type, + "electrode_config_name": electrode_config_name, + } + ) + probe.ElectrodeConfig.Electrode.insert( + {**electrode_config_key, **electrode} + for electrode in electrode_group_members + ) + self.insert1( { **key, - **generate_electrode_config( - probe_type, electrode_group_members - ), + "electrode_config_hash": electrode_config_hash, "acq_software": acq_software, "sampling_rate": spikeglx_meta.meta["imSampRate"], "recording_datetime": spikeglx_meta.recording_time, "recording_duration": ( spikeglx_meta.recording_duration - or spikeglx.retrieve_recording_duration(meta_filepath) + or spikeglx.retrieve_recording_duration(spikeglx_meta_filepath) ), } ) - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) + root_dir = find_root_directory( + get_ephys_root_data_dir(), spikeglx_meta_filepath + ) self.EphysFile.insert1( - {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} + { + **key, + "file_path": spikeglx_meta_filepath.relative_to( + root_dir + ).as_posix(), + } + ) + + # Insert channel information + # Get channel and electrode-site mapping + electrode_query = ( + probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode + & electrode_config_key + ) + + probe_electrodes = { + (shank, shank_col, shank_row): key + for key, shank, shank_col, shank_row in zip( + *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") + ) + } + + channel2electrode_map = { + recorded_site: probe_electrodes[(shank, shank_col, shank_row)] + for recorded_site, (shank, shank_col, shank_row, _) in enumerate( + spikeglx_meta.shankmap["data"] + ) + } + self.Channel.insert( + [ + {**key, "channel_idx": channel_idx, **channel_info} + for channel_idx, channel_info in channel2electrode_map.items() + ] ) + elif acq_software == "Open Ephys": dataset = 
openephys.OpenEphys(session_dir) for serial_number, probe_data in dataset.probes.items(): @@ -409,11 +464,13 @@ def make(self, key): 'No analog signals found - check "structure.oebin" file or "continuous" directory' ) - if probe_data.probe_model in supported_probe_types: + if probe_data.probe_model not in supported_probe_types: + raise NotImplementedError( + f"Processing for neuropixels probe model {spikeglx_meta.probe_model} not yet implemented." + ) + else: probe_type = probe_data.probe_model - electrode_query = probe.ProbeType.Electrode & { - "probe_type": probe_type - } + electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} probe_electrodes = { key["electrode"]: key for key in electrode_query.fetch("KEY") @@ -423,20 +480,33 @@ def make(self, key): probe_electrodes[channel_idx] for channel_idx in probe_data.ap_meta["channels_indices"] ] - else: - raise NotImplementedError( - "Processing for neuropixels" - " probe model {} not yet implemented".format( - probe_data.probe_model - ) + + # Compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) + electrode_config_hash = dict_to_uuid( + {k["electrode"]: k for k in electrode_group_members} + ) + + electrode_list = sorted( + [k["electrode"] for k in electrode_group_members] + ) + electrode_gaps = ( + [-1] + + np.where(np.diff(electrode_list) > 1)[0].tolist() + + [len(electrode_list) - 1] + ) + electrode_config_name = "; ".join( + [ + f"{electrode_list[start + 1]}-{electrode_list[end]}" + for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) + ] ) + electrode_config_key = {"electrode_config_hash": electrode_config_hash} + self.insert1( { **key, - **generate_electrode_config( - probe_type, electrode_group_members - ), + "electrode_config_hash": electrode_config_hash, "acq_software": acq_software, "sampling_rate": probe_data.ap_meta["sample_rate"], "recording_datetime": probe_data.recording_info[ @@ -462,17 +532,26 @@ def make(self, key): del probe_data, dataset 
gc.collect() - # Insert channel information - # Get channel and electrode-site mapping - channel2electrodes = get_neuropixels_channel2electrode_map( - key, acq_software - ) - self.Channel.insert( - [ - {**key, "channel_idx": channel_idx, **channel_info} - for channel_idx, channel_info in channel2electrodes.items() - ] - ) + probe_dataset = get_openephys_probe_data(key) + electrode_query = ( + probe.ProbeType.Electrode + * probe.ElectrodeConfig.Electrode + * EphysRecording + & key + ) + probe_electrodes = { + key["electrode"]: key for key in electrode_query.fetch("KEY") + } + channel2electrode_map = { + channel_idx: probe_electrodes[channel_idx] + for channel_idx in probe_dataset.ap_meta["channels_indices"] + } + self.Channel.insert( + [ + {**key, "channel_idx": channel_idx, **channel_info} + for channel_idx, channel_info in channel2electrode_map.items() + ] + ) @schema @@ -1034,7 +1113,7 @@ def make(self, key): channel_info = electrode_query.fetch(as_dict=True, order_by="channel_idx") channel_info: dict[int, dict] = { ch.pop("channel_idx"): ch for ch in channel_info - } + } # Get unit id to quality label mapping try: @@ -1050,14 +1129,14 @@ def make(self, key): ] = cluster_quality_label_map.set_index("cluster_id")[ "KSLabel" ].to_dict() # {unit: quality_label} - + # Get electrode where peak unit activity is recorded peak_electrode_ind = np.array( [ channel_info[unit_peak_channel_map[unit_id]]["electrode"] for unit_id in si_sorting.unit_ids ] - ) + ) # Get channel depth channel_depth_ind = np.array( @@ -1066,14 +1145,17 @@ def make(self, key): for unit_id in si_sorting.unit_ids ] ) - + # Assign electrode and depth for each spike - new_spikes = np.empty(spikes.shape, spikes.dtype.descr + [('electrode', ' list: return probe_data -def get_neuropixels_channel2electrode_map( - ephys_recording_key: dict, acq_software: str -) -> dict: #TODO: remove this function - """Get the channel map for neuropixels probe.""" - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = 
get_spikeglx_meta_filepath(ephys_recording_key) - spikeglx_meta = spikeglx.SpikeGLXMeta(spikeglx_meta_filepath) - electrode_config_key = ( - EphysRecording * probe.ElectrodeConfig & ephys_recording_key - ).fetch1("KEY") - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & electrode_config_key - ) - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - channel2electrode_map = { - recorded_site: probe_electrodes[(shank, shank_col, shank_row)] - for recorded_site, (shank, shank_col, shank_row, _) in enumerate( - spikeglx_meta.shankmap["data"] - ) - } - elif acq_software == "Open Ephys": - probe_dataset = get_openephys_probe_data(ephys_recording_key) - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode * EphysRecording - & ephys_recording_key - ) - - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - channel2electrode_map = { - channel_idx: probe_electrodes[channel_idx] - for channel_idx in probe_dataset.ap_meta["channels_indices"] - } - - return channel2electrode_map - - -def generate_electrode_config(probe_type: str, electrode_keys: list) -> dict: - """Generate and insert new ElectrodeConfig - - Args: - probe_type (str): probe type (e.g. 
neuropixels 2.0 - SS) - electrode_keys (list): list of keys of the probe.ProbeType.Electrode table - - Returns: - dict: representing a key of the probe.ElectrodeConfig table - """ - # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid({k["electrode"]: k for k in electrode_keys}) - - electrode_list = sorted([k["electrode"] for k in electrode_keys]) - electrode_gaps = ( - [-1] - + np.where(np.diff(electrode_list) > 1)[0].tolist() - + [len(electrode_list) - 1] - ) - electrode_config_name = "; ".join( - [ - f"{electrode_list[start + 1]}-{electrode_list[end]}" - for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) - ] - ) - - electrode_config_key = {"electrode_config_hash": electrode_config_hash} - - # ---- make new ElectrodeConfig if needed ---- - if not probe.ElectrodeConfig & electrode_config_key: - probe.ElectrodeConfig.insert1( - { - **electrode_config_key, - "probe_type": probe_type, - "electrode_config_name": electrode_config_name, - } - ) - probe.ElectrodeConfig.Electrode.insert( - {**electrode_config_key, **electrode} for electrode in electrode_keys - ) - - return electrode_config_key - - def get_recording_channels_details(ephys_recording_key: dict) -> np.array: """Get details of recording channels for a given recording.""" channels_details = {} From 5bfe201293b5c2ee34a0fb0d666ed280caa22cf9 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 5 Mar 2024 14:53:51 -0600 Subject: [PATCH 099/204] Update element_array_ephys/ephys_no_curation.py Co-authored-by: Thinh Nguyen --- element_array_ephys/ephys_no_curation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 26608997..2e45d4fe 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -326,7 +326,7 @@ def make(self, key): break else: raise FileNotFoundError( - f"Ephys recording 
data not found! for {key}." + f"Ephys recording data not found in {session_dir}." "Neither SpikeGLX nor Open Ephys recording files found" ) From 1d805849f20ee1e5ce6a911d06d189f9d699900f Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 5 Mar 2024 15:19:36 -0600 Subject: [PATCH 100/204] add generate_electrode_config_name --- element_array_ephys/ephys_no_curation.py | 96 ++++++++++++------------ 1 file changed, 47 insertions(+), 49 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 2e45d4fe..7ce99df2 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -364,38 +364,10 @@ def make(self, key): electrode_config_hash = dict_to_uuid( {k["electrode"]: k for k in electrode_group_members} ) - - electrode_list = sorted( - [k["electrode"] for k in electrode_group_members] - ) - electrode_gaps = ( - [-1] - + np.where(np.diff(electrode_list) > 1)[0].tolist() - + [len(electrode_list) - 1] - ) - electrode_config_name = "; ".join( - [ - f"{electrode_list[start + 1]}-{electrode_list[end]}" - for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) - ] + electrode_config_name = generate_electrode_config_name( + probe_type, electrode_group_members ) - electrode_config_key = {"electrode_config_hash": electrode_config_hash} - - # Insert into ElectrodeConfig - if not probe.ElectrodeConfig & electrode_config_key: - probe.ElectrodeConfig.insert1( - { - **electrode_config_key, - "probe_type": probe_type, - "electrode_config_name": electrode_config_name, - } - ) - probe.ElectrodeConfig.Electrode.insert( - {**electrode_config_key, **electrode} - for electrode in electrode_group_members - ) - self.insert1( { **key, @@ -426,7 +398,7 @@ def make(self, key): # Get channel and electrode-site mapping electrode_query = ( probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & electrode_config_key + & {"electrode_config_hash": electrode_config_hash} ) probe_electrodes = { 
@@ -474,34 +446,20 @@ def make(self, key): probe_electrodes = { key["electrode"]: key for key in electrode_query.fetch("KEY") - } + } # electrode configuration electrode_group_members = [ probe_electrodes[channel_idx] for channel_idx in probe_data.ap_meta["channels_indices"] - ] + ] # recording session-specific electrode configuration # Compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) electrode_config_hash = dict_to_uuid( {k["electrode"]: k for k in electrode_group_members} ) - - electrode_list = sorted( - [k["electrode"] for k in electrode_group_members] - ) - electrode_gaps = ( - [-1] - + np.where(np.diff(electrode_list) > 1)[0].tolist() - + [len(electrode_list) - 1] + electrode_config_name = generate_electrode_config_name( + probe_type, electrode_group_members ) - electrode_config_name = "; ".join( - [ - f"{electrode_list[start + 1]}-{electrode_list[end]}" - for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) - ] - ) - - electrode_config_key = {"electrode_config_hash": electrode_config_hash} self.insert1( { @@ -553,6 +511,20 @@ def make(self, key): ] ) + # Insert into probe.ElectrodeConfig (recording configuration) + if not probe.ElectrodeConfig & {"electrode_config_hash": electrode_config_hash}: + probe.ElectrodeConfig.insert1( + { + "probe_type": probe_type, + "electrode_config_hash": electrode_config_hash, + "electrode_config_name": electrode_config_name, + } + ) + probe.ElectrodeConfig.Electrode.insert( + {"electrode_config_hash": electrode_config_hash, **electrode} + for electrode in electrode_group_members + ) + @schema class LFP(dj.Imported): @@ -1820,3 +1792,29 @@ def get_recording_channels_details(ephys_recording_key: dict) -> np.array: ) return channels_details + + +def generate_electrode_config_name(probe_type: str, electrode_keys: list) -> str: + """Generate electrode config name. + + Args: + probe_type (str): probe type (e.g. 
neuropixels 2.0 - SS) + electrode_keys (list): list of keys of the probe.ProbeType.Electrode table + + Returns: + electrode_config_name (str) + """ + electrode_list = sorted([k["electrode"] for k in electrode_keys]) + electrode_gaps = ( + [-1] + + np.where(np.diff(electrode_list) > 1)[0].tolist() + + [len(electrode_list) - 1] + ) + electrode_config_name = "; ".join( + [ + f"{electrode_list[start + 1]}-{electrode_list[end]}" + for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) + ] + ) + + return electrode_config_name From 88ce139bec18a0a01b13943b92c57dbcbc64e074 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Thu, 7 Mar 2024 12:29:50 -0600 Subject: [PATCH 101/204] refactor: :recycle: change sorter_name --- .../spike_sorting/si_spike_sorting.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 6df25de8..a7d1b963 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -100,9 +100,7 @@ def make(self, key): ).fetch1("clustering_method", "acq_software", "clustering_output_dir", "params") # Get sorter method and create output directory. - sorter_name = ( - "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method - ) + sorter_name = clustering_method.replace(".", "_") for required_key in ( "SI_SORTING_PARAMS", @@ -209,9 +207,7 @@ def make(self, key): output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) # Get sorter method and create output directory. 
- sorter_name = ( - "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method - ) + sorter_name = clustering_method.replace(".", "_") recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" si_recording: si.BaseRecording = si.load_extractor(recording_file) @@ -264,10 +260,7 @@ def make(self, key): output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) # Get sorter method and create output directory. - sorter_name = ( - "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method - ) - + sorter_name = clustering_method.replace(".", "_") output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" From 7eaefa49852f0e0807497de37a3c19d30ee1c5f2 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Fri, 8 Mar 2024 11:11:38 -0600 Subject: [PATCH 102/204] address review comments for generate_electrode_config_entry --- element_array_ephys/ephys_no_curation.py | 55 +++++++++++------------- 1 file changed, 26 insertions(+), 29 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 7ce99df2..a6edbe54 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -360,18 +360,14 @@ def make(self, key): for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] ] # recording session-specific electrode configuration - # Compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid( - {k["electrode"]: k for k in electrode_group_members} - ) - electrode_config_name = generate_electrode_config_name( + econfig_entry, econfig_electrodes = generate_electrode_config_entry( probe_type, electrode_group_members ) self.insert1( { **key, - "electrode_config_hash": electrode_config_hash, + 
"electrode_config_hash": econfig_entry["electrode_config_hash"], "acq_software": acq_software, "sampling_rate": spikeglx_meta.meta["imSampRate"], "recording_datetime": spikeglx_meta.recording_time, @@ -398,7 +394,7 @@ def make(self, key): # Get channel and electrode-site mapping electrode_query = ( probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & {"electrode_config_hash": electrode_config_hash} + & {"electrode_config_hash": econfig_entry["electrode_config_hash"]} ) probe_electrodes = { @@ -453,18 +449,14 @@ def make(self, key): for channel_idx in probe_data.ap_meta["channels_indices"] ] # recording session-specific electrode configuration - # Compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid( - {k["electrode"]: k for k in electrode_group_members} - ) - electrode_config_name = generate_electrode_config_name( + econfig_entry, econfig_electrodes = generate_electrode_config_entry( probe_type, electrode_group_members ) self.insert1( { **key, - "electrode_config_hash": electrode_config_hash, + "electrode_config_hash": econfig_entry["electrode_config_hash"], "acq_software": acq_software, "sampling_rate": probe_data.ap_meta["sample_rate"], "recording_datetime": probe_data.recording_info[ @@ -512,18 +504,11 @@ def make(self, key): ) # Insert into probe.ElectrodeConfig (recording configuration) - if not probe.ElectrodeConfig & {"electrode_config_hash": electrode_config_hash}: - probe.ElectrodeConfig.insert1( - { - "probe_type": probe_type, - "electrode_config_hash": electrode_config_hash, - "electrode_config_name": electrode_config_name, - } - ) - probe.ElectrodeConfig.Electrode.insert( - {"electrode_config_hash": electrode_config_hash, **electrode} - for electrode in electrode_group_members - ) + if not probe.ElectrodeConfig & { + "electrode_config_hash": econfig_entry["electrode_config_hash"] + }: + probe.ElectrodeConfig.insert1(econfig_entry) + 
probe.ElectrodeConfig.Electrode.insert(econfig_electrodes) @schema @@ -1794,16 +1779,19 @@ def get_recording_channels_details(ephys_recording_key: dict) -> np.array: return channels_details -def generate_electrode_config_name(probe_type: str, electrode_keys: list) -> str: - """Generate electrode config name. +def generate_electrode_config_entry(probe_type: str, electrode_keys: list) -> dict: + """Generate and insert new ElectrodeConfig Args: probe_type (str): probe type (e.g. neuropixels 2.0 - SS) electrode_keys (list): list of keys of the probe.ProbeType.Electrode table Returns: - electrode_config_name (str) + dict: representing a key of the probe.ElectrodeConfig table """ + # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) + electrode_config_hash = dict_to_uuid({k["electrode"]: k for k in electrode_keys}) + electrode_list = sorted([k["electrode"] for k in electrode_keys]) electrode_gaps = ( [-1] @@ -1816,5 +1804,14 @@ def generate_electrode_config_name(probe_type: str, electrode_keys: list) -> str for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) ] ) + electrode_config_key = {"electrode_config_hash": electrode_config_hash} + econfig_entry = { + **electrode_config_key, + "probe_type": probe_type, + "electrode_config_name": electrode_config_name, + } + econfig_electrodes = [ + {**electrode, **electrode_config_key} for electrode in electrode_keys + ] - return electrode_config_name + return econfig_entry, econfig_electrodes From d47be56dd8baeb88d8238701c1aa5ada457d3c36 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Mon, 11 Mar 2024 18:39:58 -0500 Subject: [PATCH 103/204] refactor: :art: refactor PostProcessing --- element_array_ephys/spike_sorting/si_spike_sorting.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index a7d1b963..a3f54e1e 100644 --- 
a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -253,15 +253,14 @@ class PostProcessing(dj.Imported): def make(self, key): execution_time = datetime.utcnow() - # Load recording object. + # Load recording & sorting object. clustering_method, output_dir, params = ( ephys.ClusteringTask * ephys.ClusteringParamSet & key ).fetch1("clustering_method", "clustering_output_dir", "params") output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - # Get sorter method and create output directory. sorter_name = clustering_method.replace(".", "_") output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" @@ -301,14 +300,13 @@ def make(self, key): _ = si.postprocessing.compute_principal_components( waveform_extractor=we, **params.get("SI_QUALITY_METRICS_PARAMS", None) ) + metrics = si.qualitymetrics.compute_quality_metrics(waveform_extractor=we) + # Save the output (metrics.csv to the output dir) metrics_output_dir = output_dir / sorter_name / "metrics" metrics_output_dir.mkdir(parents=True, exist_ok=True) - - metrics = si.qualitymetrics.compute_quality_metrics(waveform_extractor=we) metrics.to_csv(metrics_output_dir / "metrics.csv") - # Save results self.insert1( { **key, From 8dfc8583017864ce23d22865b8075339a274b1f3 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Mon, 11 Mar 2024 18:43:09 -0500 Subject: [PATCH 104/204] chore: :art: run docker if the package is not built into spikeinterface --- element_array_ephys/spike_sorting/si_spike_sorting.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index a3f54e1e..c74ee9d4 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ 
b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -205,23 +205,23 @@ def make(self, key): ephys.ClusteringTask * ephys.ClusteringParamSet & key ).fetch1("clustering_method", "clustering_output_dir", "params") output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - # Get sorter method and create output directory. sorter_name = clustering_method.replace(".", "_") recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" si_recording: si.BaseRecording = si.load_extractor(recording_file) # Run sorting + # Sorting performed in a dedicated docker environment if the sorter is not built in the spikeinterface package. si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( sorter_name=sorter_name, recording=si_recording, output_folder=output_dir / sorter_name / "spike_sorting", remove_existing_folder=True, verbose=True, - docker_image=True, + docker_image=sorter_name not in si.sorters.installed_sorters(), **params.get("SI_SORTING_PARAMS", {}), ) + # Save sorting object sorting_save_path = ( output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" ) From 6e20a11e92455220eebc065cd4870dc856739544 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Mon, 11 Mar 2024 18:45:21 -0500 Subject: [PATCH 105/204] refactor: :recycle: clean up import & docstring --- .../spike_sorting/si_spike_sorting.py | 21 ++----------------- 1 file changed, 2 insertions(+), 19 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index c74ee9d4..36956d8f 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -1,34 +1,17 @@ """ -The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the -"spikeinterface" pipeline. 
-Spikeinterface developed by Alessio Buccino, Samuel Garcia, Cole Hurwitz, Jeremy Magland, and Matthias Hennig (https://github.com/SpikeInterface) - -The DataJoint pipeline currently incorporated Spikeinterfaces approach of running Kilosort using a container - -The follow pipeline features intermediary tables: -1. PreProcessing - for preprocessing steps (no GPU required) - - create recording extractor and link it to a probe - - bandpass filtering - - common mode referencing -2. SIClustering - kilosort (MATLAB) - requires GPU and docker/singularity containers - - supports kilosort 2.0, 2.5 or 3.0 (https://github.com/MouseLand/Kilosort.git) -3. PostProcessing - for postprocessing steps (no GPU required) - - create waveform extractor object - - extract templates, waveforms and snrs - - quality_metrics +The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the "spikeinterface" pipeline. Spikeinterface was developed by Alessio Buccino, Samuel Garcia, Cole Hurwitz, Jeremy Magland, and Matthias Hennig (https://github.com/SpikeInterface) """ -import pathlib from datetime import datetime import datajoint as dj import pandas as pd -import probeinterface as pi import spikeinterface as si from element_array_ephys import get_logger, probe, readers from element_interface.utils import find_full_path from spikeinterface import exporters, postprocessing, qualitymetrics, sorters +from .. import get_logger, probe, readers from . 
import si_preprocessing log = get_logger(__name__) From 8d04e10ce8e08370d9052b1a085bfbdf89d53fbd Mon Sep 17 00:00:00 2001 From: JaerongA Date: Mon, 11 Mar 2024 18:55:01 -0500 Subject: [PATCH 106/204] revert: :art: replace SI_READERS with si_extractor --- .../spike_sorting/si_spike_sorting.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 36956d8f..2ebe90ba 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -47,12 +47,6 @@ def activate( SI_SORTERS = [s.replace("_", ".") for s in si.sorters.sorter_dict.keys()] -SI_READERS = { - "Open Ephys": si.extractors.read_openephys, - "SpikeGLX": si.extractors.read_spikeglx, - "Intan": si.extractors.read_intan, -} - @schema class PreProcessing(dj.Imported): @@ -108,9 +102,7 @@ def make(self, key): output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) recording_dir = output_dir / sorter_name / "recording" recording_dir.mkdir(parents=True, exist_ok=True) - recording_file = ( - recording_dir / "si_recording.pkl" - ) # recording cache to be created for each key + recording_file = recording_dir / "si_recording.pkl" # Create SI recording extractor object if acq_software == "SpikeGLX": @@ -125,12 +117,16 @@ def make(self, key): assert len(oe_probe.recording_info["recording_files"]) == 1 data_dir = oe_probe.recording_info["recording_files"][0] else: - raise NotImplementedError(f"Not implemented for {acq_software}") + si_extractor: si.extractors.neoextractors = ( + si.extractors.extractorlist.recording_extractor_full_dict[ + acq_software.replace(" ", "").lower() + ] + ) # data extractor object stream_names, stream_ids = si.extractors.get_neo_streams( acq_software.strip().lower(), folder_path=data_dir ) - si_recording: si.BaseRecording = SI_READERS[acq_software]( + si_recording: 
si.BaseRecording = si_extractor[acq_software]( folder_path=data_dir, stream_name=stream_names[0] ) From bb39194aeb2a06390be6b0415afe0bd46310dbbf Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 12 Mar 2024 10:39:15 -0500 Subject: [PATCH 107/204] fix acq_software name --- element_array_ephys/spike_sorting/si_spike_sorting.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 2ebe90ba..12fc069b 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -117,14 +117,13 @@ def make(self, key): assert len(oe_probe.recording_info["recording_files"]) == 1 data_dir = oe_probe.recording_info["recording_files"][0] else: + acq_software = acq_software.replace(" ", "").lower() si_extractor: si.extractors.neoextractors = ( - si.extractors.extractorlist.recording_extractor_full_dict[ - acq_software.replace(" ", "").lower() - ] + si.extractors.extractorlist.recording_extractor_full_dict[acq_software] ) # data extractor object stream_names, stream_ids = si.extractors.get_neo_streams( - acq_software.strip().lower(), folder_path=data_dir + acq_software, folder_path=data_dir ) si_recording: si.BaseRecording = si_extractor[acq_software]( folder_path=data_dir, stream_name=stream_names[0] From 01ff816fd4e2077c3b9c3ff4c5c439faddbb43c9 Mon Sep 17 00:00:00 2001 From: JaerongA Date: Tue, 12 Mar 2024 10:54:39 -0500 Subject: [PATCH 108/204] feat: :ambulance: make all secondary attributes nullable in QualityMetrics some sorters don't output values expected by the table --- element_array_ephys/ephys_no_curation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index a6edbe54..bfb3e2ad 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py 
@@ -1603,8 +1603,8 @@ class Waveform(dj.Part): -> master -> CuratedClustering.Unit --- - amplitude: float # (uV) absolute difference between waveform peak and trough - duration: float # (ms) time between waveform peak and trough + amplitude=null: float # (uV) absolute difference between waveform peak and trough + duration=null: float # (ms) time between waveform peak and trough halfwidth=null: float # (ms) spike width at half max amplitude pt_ratio=null: float # absolute amplitude of peak divided by absolute amplitude of trough relative to 0 repolarization_slope=null: float # the repolarization slope was defined by fitting a regression line to the first 30us from trough to peak From 67a1ffc767261e5a9c7d9e7c85d418005c3dac80 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 17 Apr 2024 09:15:31 -0500 Subject: [PATCH 109/204] feat: save spike interface results with relative path --- element_array_ephys/spike_sorting/si_spike_sorting.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 12fc069b..ba310d6e 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -150,7 +150,7 @@ def make(self, key): # Run preprocessing and save results to output folder si_preproc_func = getattr(si_preprocessing, params["SI_PREPROCESSING_METHOD"]) si_recording = si_preproc_func(si_recording) - si_recording.dump_to_pickle(file_path=recording_file) + si_recording.dump_to_pickle(file_path=recording_file, relative_to=output_dir) self.insert1( { @@ -203,7 +203,7 @@ def make(self, key): sorting_save_path = ( output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" ) - si_sorting.dump_to_pickle(sorting_save_path) + si_sorting.dump_to_pickle(sorting_save_path, relative_to=output_dir) self.insert1( { From d44dbaa03aa8debb2f9d15fe60811a4fcb52a535 Mon Sep 17 00:00:00 2001 From: Thinh 
Nguyen Date: Fri, 26 Apr 2024 11:50:15 -0500 Subject: [PATCH 110/204] fix(spikeglx): bugfix loading spikeglx data --- element_array_ephys/ephys_no_curation.py | 11 +++++++++-- element_array_ephys/spike_sorting/si_spike_sorting.py | 11 +++++++---- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index bfb3e2ad..2dde282b 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -338,8 +338,15 @@ def make(self, key): supported_probe_types = probe.ProbeType.fetch("probe_type") if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - spikeglx_meta = spikeglx.SpikeGLXMeta(spikeglx_meta_filepath) + for meta_filepath in ephys_meta_filepaths: + spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) + if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: + spikeglx_meta_filepath = meta_filepath + break + else: + raise FileNotFoundError( + "No SpikeGLX data found for probe insertion: {}".format(key) + ) if spikeglx_meta.probe_model not in supported_probe_types: raise NotImplementedError( diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 12fc069b..0b53bf1d 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -117,10 +117,13 @@ def make(self, key): assert len(oe_probe.recording_info["recording_files"]) == 1 data_dir = oe_probe.recording_info["recording_files"][0] else: - acq_software = acq_software.replace(" ", "").lower() - si_extractor: si.extractors.neoextractors = ( - si.extractors.extractorlist.recording_extractor_full_dict[acq_software] - ) # data extractor object + raise NotImplementedError( + f"SpikeInterface processing for {acq_software} not yet implemented." 
+ ) + acq_software = acq_software.replace(" ", "").lower() + si_extractor: si.extractors.neoextractors = ( + si.extractors.extractorlist.recording_extractor_full_dict[acq_software] + ) # data extractor object stream_names, stream_ids = si.extractors.get_neo_streams( acq_software, folder_path=data_dir From d86928bf41a2bb0e30c7136d74fc485c9de2b90f Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 26 Apr 2024 12:13:15 -0500 Subject: [PATCH 111/204] fix: bugfix inserting `ElectrodeConfig` --- element_array_ephys/ephys_no_curation.py | 94 ++++++++++++------------ 1 file changed, 48 insertions(+), 46 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 2dde282b..dcb2ded6 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -371,31 +371,30 @@ def make(self, key): probe_type, electrode_group_members ) - self.insert1( - { - **key, - "electrode_config_hash": econfig_entry["electrode_config_hash"], - "acq_software": acq_software, - "sampling_rate": spikeglx_meta.meta["imSampRate"], - "recording_datetime": spikeglx_meta.recording_time, - "recording_duration": ( - spikeglx_meta.recording_duration - or spikeglx.retrieve_recording_duration(spikeglx_meta_filepath) - ), - } - ) + ephys_recording_entry = { + **key, + "electrode_config_hash": econfig_entry["electrode_config_hash"], + "acq_software": acq_software, + "sampling_rate": spikeglx_meta.meta["imSampRate"], + "recording_datetime": spikeglx_meta.recording_time, + "recording_duration": ( + spikeglx_meta.recording_duration + or spikeglx.retrieve_recording_duration(spikeglx_meta_filepath) + ), + } root_dir = find_root_directory( get_ephys_root_data_dir(), spikeglx_meta_filepath ) - self.EphysFile.insert1( + + ephys_file_entries = [ { **key, "file_path": spikeglx_meta_filepath.relative_to( root_dir ).as_posix(), } - ) + ] # Insert channel information # Get channel and electrode-site mapping @@ -417,13 +416,11 @@ 
def make(self, key): spikeglx_meta.shankmap["data"] ) } - self.Channel.insert( - [ - {**key, "channel_idx": channel_idx, **channel_info} - for channel_idx, channel_info in channel2electrode_map.items() - ] - ) + ephys_channel_entries = [ + {**key, "channel_idx": channel_idx, **channel_info} + for channel_idx, channel_info in channel2electrode_map.items() + ] elif acq_software == "Open Ephys": dataset = openephys.OpenEphys(session_dir) for serial_number, probe_data in dataset.probes.items(): @@ -460,31 +457,29 @@ def make(self, key): probe_type, electrode_group_members ) - self.insert1( - { - **key, - "electrode_config_hash": econfig_entry["electrode_config_hash"], - "acq_software": acq_software, - "sampling_rate": probe_data.ap_meta["sample_rate"], - "recording_datetime": probe_data.recording_info[ - "recording_datetimes" - ][0], - "recording_duration": np.sum( - probe_data.recording_info["recording_durations"] - ), - } - ) + ephys_recording_entry = { + **key, + "electrode_config_hash": econfig_entry["electrode_config_hash"], + "acq_software": acq_software, + "sampling_rate": probe_data.ap_meta["sample_rate"], + "recording_datetime": probe_data.recording_info["recording_datetimes"][ + 0 + ], + "recording_duration": np.sum( + probe_data.recording_info["recording_durations"] + ), + } root_dir = find_root_directory( get_ephys_root_data_dir(), probe_data.recording_info["recording_files"][0], ) - self.EphysFile.insert( - [ - {**key, "file_path": fp.relative_to(root_dir).as_posix()} - for fp in probe_data.recording_info["recording_files"] - ] - ) + + ephys_file_entries = [ + {**key, "file_path": fp.relative_to(root_dir).as_posix()} + for fp in probe_data.recording_info["recording_files"] + ] + # Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough del probe_data, dataset gc.collect() @@ -503,11 +498,14 @@ def make(self, key): channel_idx: probe_electrodes[channel_idx] for channel_idx in 
probe_dataset.ap_meta["channels_indices"] } - self.Channel.insert( - [ - {**key, "channel_idx": channel_idx, **channel_info} - for channel_idx, channel_info in channel2electrode_map.items() - ] + + ephys_channel_entries = [ + {**key, "channel_idx": channel_idx, **channel_info} + for channel_idx, channel_info in channel2electrode_map.items() + ] + else: + raise NotImplementedError( + f"Processing ephys files from acquisition software of type {acq_software} is not yet implemented." ) # Insert into probe.ElectrodeConfig (recording configuration) @@ -517,6 +515,10 @@ def make(self, key): probe.ElectrodeConfig.insert1(econfig_entry) probe.ElectrodeConfig.Electrode.insert(econfig_electrodes) + self.insert1(ephys_recording_entry) + self.EphysFile.insert(ephys_file_entries) + self.Channel.insert(ephys_channel_entries) + @schema class LFP(dj.Imported): From f8ffd7760cb1be6ac19d24e37ebf69d11d773972 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 3 Apr 2024 14:14:35 -0500 Subject: [PATCH 112/204] feat(spikesorting): save to phy and generate report --- element_array_ephys/spike_sorting/si_spike_sorting.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 90a88260..52c96709 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -288,6 +288,11 @@ def make(self, key): metrics_output_dir.mkdir(parents=True, exist_ok=True) metrics.to_csv(metrics_output_dir / "metrics.csv") + # Save to phy format + si.exporters.export_to_phy(waveform_extractor=we, output_folder=output_dir / sorter_name / "phy") + # Generate spike interface report + si.exporters.export_report(waveform_extractor=we, output_folder=output_dir / sorter_name / "spikeinterface_report") + self.insert1( { **key, From 7309082858b5210dcbf9566f2e8afd72416e9655 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 26 Apr 2024 
12:35:39 -0500 Subject: [PATCH 113/204] chore: cleanup init --- element_array_ephys/__init__.py | 21 ------------------- .../spike_sorting/ecephys_spike_sorting.py | 3 +-- .../spike_sorting/si_spike_sorting.py | 5 ++--- 3 files changed, 3 insertions(+), 26 deletions(-) diff --git a/element_array_ephys/__init__.py b/element_array_ephys/__init__.py index 3a0e5af6..1c0c7285 100644 --- a/element_array_ephys/__init__.py +++ b/element_array_ephys/__init__.py @@ -1,22 +1 @@ -""" -isort:skip_file -""" - -import logging -import os - -import datajoint as dj - - -__all__ = ["ephys", "get_logger"] - -dj.config["enable_python_native_blobs"] = True - - -def get_logger(name): - log = logging.getLogger(name) - log.setLevel(os.getenv("LOGLEVEL", "INFO")) - return log - - from . import ephys_acute as ephys diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py index 4de349eb..3a43c384 100644 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py @@ -22,7 +22,6 @@ import datajoint as dj -from element_array_ephys import get_logger from decimal import Decimal import json from datetime import datetime, timedelta @@ -33,7 +32,7 @@ kilosort_triggering, ) -log = get_logger(__name__) +log = dj.logger schema = dj.schema() diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 52c96709..306c1eb6 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -7,14 +7,13 @@ import datajoint as dj import pandas as pd import spikeinterface as si -from element_array_ephys import get_logger, probe, readers +from element_array_ephys import probe, readers from element_interface.utils import find_full_path from spikeinterface import exporters, postprocessing, qualitymetrics, sorters -from .. 
import get_logger, probe, readers from . import si_preprocessing -log = get_logger(__name__) +log = dj.logger schema = dj.schema() From d778b1e7d8822173ad43d60707fbb8fa8c7ff801 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 26 Apr 2024 13:24:45 -0500 Subject: [PATCH 114/204] fix: update channel-electrode mapping --- element_array_ephys/ephys_no_curation.py | 164 ++++++++---------- .../spike_sorting/si_spike_sorting.py | 13 +- 2 files changed, 81 insertions(+), 96 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index dcb2ded6..68251309 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1040,51 +1040,47 @@ def make(self, key): ).fetch1("clustering_method", "clustering_output_dir") output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - # Get sorter method and create output directory. - sorter_name = ( - "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method + # Get channel and electrode-site mapping + electrode_query = ( + (EphysRecording.Channel & key) + .proj(..., "-channel_name") ) - waveform_dir = output_dir / sorter_name / "waveform" - sorting_dir = output_dir / sorter_name / "spike_sorting" + channel2electrode_map = electrode_query.fetch(as_dict=True) + channel2electrode_map: dict[int, dict] = { + chn.pop("channel_idx"): chn for chn in channel2electrode_map + } - if waveform_dir.exists(): # read from spikeinterface outputs - we: si.WaveformExtractor = si.load_waveforms( - waveform_dir, with_recording=False - ) + # Get sorter method and create output directory. 
+ sorter_name = clustering_method.replace(".", "_") + si_waveform_dir = output_dir / sorter_name / "waveform" + si_sorting_dir = output_dir / sorter_name / "spike_sorting" + + if si_waveform_dir.exists(): + + # Read from spikeinterface outputs + we: si.WaveformExtractor = si.load_waveforms(si_waveform_dir, with_recording=False) si_sorting: si.sorters.BaseSorter = si.load_extractor( - sorting_dir / "si_sorting.pkl" + si_sorting_dir / "si_sorting.pkl" ) - unit_peak_channel_map: dict[int, int] = si.get_template_extremum_channel( - we, outputs="index" - ) # {unit: peak_channel_index} + unit_peak_channel: dict[int, int] = si.get_template_extremum_channel( + we, outputs="id" + ) # {unit: peak_channel_id} spike_count_dict: dict[int, int] = si_sorting.count_num_spikes_per_unit() # {unit: spike_count} - spikes = si_sorting.to_spike_vector( - extremum_channel_inds=unit_peak_channel_map - ) - - # Get electrode & channel info - electrode_config_key = ( - EphysRecording * probe.ElectrodeConfig & key - ).fetch1("KEY") - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & electrode_config_key - ) * (dj.U("electrode", "channel_idx") & EphysRecording.Channel) + spikes = si_sorting.to_spike_vector() - channel_info = electrode_query.fetch(as_dict=True, order_by="channel_idx") - channel_info: dict[int, dict] = { - ch.pop("channel_idx"): ch for ch in channel_info + # reorder channel2electrode_map according to recording channel ids + channel2electrode_map = { + chn_id: channel2electrode_map[int(chn_id)] for chn_id in we.channel_ids } # Get unit id to quality label mapping try: cluster_quality_label_map = pd.read_csv( - sorting_dir / "sorter_output" / "cluster_KSLabel.tsv", + si_sorting_dir / "sorter_output" / "cluster_KSLabel.tsv", delimiter="\t", ) except FileNotFoundError: @@ -1099,7 +1095,7 @@ def make(self, key): # Get electrode where peak unit activity is recorded peak_electrode_ind = np.array( [ - 
channel_info[unit_peak_channel_map[unit_id]]["electrode"] + channel2electrode_map[unit_peak_channel[unit_id]]["electrode"] for unit_id in si_sorting.unit_ids ] ) @@ -1107,7 +1103,7 @@ def make(self, key): # Get channel depth channel_depth_ind = np.array( [ - channel_info[unit_peak_channel_map[unit_id]]["y_coord"] + channel2electrode_map[unit_peak_channel[unit_id]]["y_coord"] for unit_id in si_sorting.unit_ids ] ) @@ -1132,7 +1128,7 @@ def make(self, key): units.append( { **key, - **channel_info[unit_peak_channel_map[unit_id]], + **channel2electrode_map[unit_peak_channel[unit_id]], "unit": unit_id, "cluster_quality_label": cluster_quality_label_map.get( unit_id, "n.a." @@ -1143,10 +1139,10 @@ def make(self, key): "spike_count": spike_count_dict[unit_id], "spike_sites": new_spikes["electrode"][ new_spikes["unit_index"] == unit_id - ], + ], "spike_depths": new_spikes["depth"][ new_spikes["unit_index"] == unit_id - ], + ], } ) @@ -1184,20 +1180,10 @@ def make(self, key): spike_times = kilosort_dataset.data[spike_time_key] kilosort_dataset.extract_spike_depths() - # Get channel and electrode-site mapping - channel_info = ( - (EphysRecording.Channel & key) - .proj(..., "-channel_name") - .fetch(as_dict=True, order_by="channel_idx") - ) - channel_info: dict[int, dict] = { - ch.pop("channel_idx"): ch for ch in channel_info - } # e.g., {0: {'subject': 'sglx', 'session_id': 912231859, 'insertion_number': 1, 'electrode_config_hash': UUID('8d4cc6d8-a02d-42c8-bf27-7459c39ea0ee'), 'probe_type': 'neuropixels 1.0 - 3A', 'electrode': 0}} - # -- Spike-sites and Spike-depths -- spike_sites = np.array( [ - channel_info[s]["electrode"] + channel2electrode_map[s]["electrode"] for s in kilosort_dataset.data["spike_sites"] ] ) @@ -1219,7 +1205,7 @@ def make(self, key): **key, "unit": unit, "cluster_quality_label": unit_lbl, - **channel_info[unit_channel], + **channel2electrode_map[unit_channel], "spike_times": unit_spike_times, "spike_count": spike_count, "spike_sites": spike_sites[ @@ 
-1292,33 +1278,31 @@ def make(self, key): ClusteringTask * ClusteringParamSet & key ).fetch1("clustering_method", "clustering_output_dir") output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - sorter_name = ( - "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method - ) + sorter_name = clustering_method.replace(".", "_") # Get channel and electrode-site mapping - channel_info = ( + electrode_query = ( (EphysRecording.Channel & key) .proj(..., "-channel_name") - .fetch(as_dict=True, order_by="channel_idx") ) - channel_info: dict[int, dict] = { - ch.pop("channel_idx"): ch for ch in channel_info - } # e.g., {0: {'subject': 'sglx', 'session_id': 912231859, 'insertion_number': 1, 'electrode_config_hash': UUID('8d4cc6d8-a02d-42c8-bf27-7459c39ea0ee'), 'probe_type': 'neuropixels 1.0 - 3A', 'electrode': 0}} + channel2electrode_map = electrode_query.fetch(as_dict=True) + channel2electrode_map: dict[int, dict] = { + chn.pop("channel_idx"): chn for chn in channel2electrode_map + } - if ( - output_dir / sorter_name / "waveform" - ).exists(): # read from spikeinterface outputs + si_waveform_dir = output_dir / sorter_name / "waveform" + if si_waveform_dir.exists(): # read from spikeinterface outputs + we: si.WaveformExtractor = si.load_waveforms(si_waveform_dir, with_recording=False) + unit_id_to_peak_channel_map: dict[ + int, np.ndarray + ] = si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices # {unit: peak_channel_index} - waveform_dir = output_dir / sorter_name / "waveform" - we: si.WaveformExtractor = si.load_waveforms( - waveform_dir, with_recording=False - ) - unit_id_to_peak_channel_map: dict[int, np.ndarray] = ( - si.ChannelSparsity.from_best_channels( - we, 1, peak_sign="neg" - ).unit_id_to_channel_indices - ) # {unit: peak_channel_index} + # reorder channel2electrode_map according to recording channel ids + channel2electrode_map = { + chn_id: channel2electrode_map[int(chn_id)] for chn_id in 
we.channel_ids + } # Get mean waveform for each unit from all channels mean_waveforms = we.get_all_templates( @@ -1329,30 +1313,32 @@ def make(self, key): unit_electrode_waveforms = [] for unit in (CuratedClustering.Unit & key).fetch("KEY", order_by="unit"): + unit_waveforms = we.get_template( + unit_id=unit["unit"], mode="average", force_dense=True + ) # (sample x channel) + peak_chn_idx = list(we.channel_ids).index( + unit_id_to_peak_channel_map[unit["unit"]][0] + ) unit_peak_waveform.append( { **unit, - "peak_electrode_waveform": we.get_template( - unit_id=unit["unit"], mode="average", force_dense=True - )[:, unit_id_to_peak_channel_map[unit["unit"]][0]], + "peak_electrode_waveform": unit_waveforms[:, peak_chn_idx], } ) - unit_electrode_waveforms.extend( [ { **unit, - **channel_info[c], - "waveform_mean": mean_waveforms[unit["unit"] - 1, :, c], + **channel2electrode_map[c], + "waveform_mean": mean_waveforms[unit["unit"] - 1, :, c_idx], } - for c in channel_info + for c_idx, c in enumerate(channel2electrode_map) ] ) self.insert1(key) self.PeakWaveform.insert(unit_peak_waveform) self.Waveform.insert(unit_electrode_waveforms) - else: kilosort_dataset = kilosort.Kilosort(output_dir) @@ -1390,12 +1376,12 @@ def yield_unit_waveforms(): unit_electrode_waveforms.append( { **units[unit_no], - **channel_info[channel], + **channel2electrode_map[channel], "waveform_mean": channel_waveform, } ) if ( - channel_info[channel]["electrode"] + channel2electrode_map[channel]["electrode"] == units[unit_no]["electrode"] ): unit_peak_waveform = { @@ -1405,7 +1391,6 @@ def yield_unit_waveforms(): yield unit_peak_waveform, unit_electrode_waveforms # Spike interface mean and peak waveform extraction from we object - elif len(waveforms_folder) > 0 & (waveforms_folder[0]).exists(): we_kilosort = si.load_waveforms(waveforms_folder[0].parent) unit_templates = we_kilosort.get_all_templates() @@ -1432,12 +1417,12 @@ def yield_unit_waveforms(): unit_electrode_waveforms.append( { 
**units[unit_no], - **channel_info[channel], + **channel2electrode_map[channel], "waveform_mean": channel_waveform, } ) if ( - channel_info[channel]["electrode"] + channel2electrode_map[channel]["electrode"] == units[unit_no]["electrode"] ): unit_peak_waveform = { @@ -1506,13 +1491,13 @@ def yield_unit_waveforms(): unit_electrode_waveforms.append( { **unit_dict, - **channel_info[channel], + **channel2electrode_map[channel], "waveform_mean": channel_waveform.mean(axis=0), "waveforms": channel_waveform, } ) if ( - channel_info[channel]["electrode"] + channel2electrode_map[channel]["electrode"] == unit_dict["electrode"] ): unit_peak_waveform = { @@ -1630,12 +1615,15 @@ def make(self, key): ClusteringTask * ClusteringParamSet & key ).fetch1("clustering_method", "clustering_output_dir") output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - sorter_name = ( - "kilosort2_5" if clustering_method == "kilosort2.5" else clustering_method - ) - metric_fp = output_dir / sorter_name / "metrics" / "metrics.csv" - if not metric_fp.exists(): - raise FileNotFoundError(f"QC metrics file not found: {metric_fp}") + sorter_name = clustering_method.replace(".", "_") + + # find metric_fp + for metric_fp in [output_dir / "metrics.csv", output_dir / sorter_name / "metrics" / "metrics.csv"]: + if metric_fp.exists(): + break + else: + raise FileNotFoundError(f"QC metrics file not found in: {output_dir}") + metrics_df = pd.read_csv(metric_fp) # Conform the dataframe to match the table definition diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 306c1eb6..d14746fb 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -132,21 +132,18 @@ def make(self, key): ) # Add probe information to recording object - electrode_config_key = ( - probe.ElectrodeConfig * ephys.EphysRecording & key - ).fetch1("KEY") electrodes_df = ( ( - 
probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode - & electrode_config_key + ephys.EphysRecording.Channel * probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode + & key ) .fetch(format="frame") - .reset_index()[["electrode", "x_coord", "y_coord", "shank"]] + .reset_index() ) # Create SI probe object - si_probe = readers.probe_geometry.to_probeinterface(electrodes_df) - si_probe.set_device_channel_indices(range(len(electrodes_df))) + si_probe = readers.probe_geometry.to_probeinterface(electrodes_df[["electrode", "x_coord", "y_coord", "shank"]]) + si_probe.set_device_channel_indices(electrodes_df["channel_idx"].values) si_recording.set_probe(probe=si_probe, in_place=True) # Run preprocessing and save results to output folder From 015341c1127300e10e9011ec5d49a96abc3322f0 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 26 Apr 2024 17:23:29 -0500 Subject: [PATCH 115/204] feat: test spikeinterface for spikeglx data --- element_array_ephys/ephys_no_curation.py | 144 ++++++++---------- .../spike_sorting/si_preprocessing.py | 2 +- .../spike_sorting/si_spike_sorting.py | 8 +- 3 files changed, 72 insertions(+), 82 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 68251309..333a189a 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -352,24 +352,24 @@ def make(self, key): raise NotImplementedError( f"Processing for neuropixels probe model {spikeglx_meta.probe_model} not yet implemented." 
) - else: - probe_type = spikeglx_meta.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } # electrode configuration - electrode_group_members = [ - probe_electrodes[(shank, shank_col, shank_row)] - for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] - ] # recording session-specific electrode configuration - - econfig_entry, econfig_electrodes = generate_electrode_config_entry( - probe_type, electrode_group_members + probe_type = spikeglx_meta.probe_model + electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} + + probe_electrodes = { + (shank, shank_col, shank_row): key + for key, shank, shank_col, shank_row in zip( + *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") ) + } # electrode configuration + electrode_group_members = [ + probe_electrodes[(shank, shank_col, shank_row)] + for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] + ] # recording session-specific electrode configuration + + econfig_entry, econfig_electrodes = generate_electrode_config_entry( + probe_type, electrode_group_members + ) ephys_recording_entry = { **key, @@ -398,18 +398,6 @@ def make(self, key): # Insert channel information # Get channel and electrode-site mapping - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & {"electrode_config_hash": econfig_entry["electrode_config_hash"]} - ) - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - channel2electrode_map = { recorded_site: probe_electrodes[(shank, shank_col, shank_row)] for recorded_site, (shank, shank_col, shank_row, _) in enumerate( @@ -418,7 +406,12 @@ def make(self, key): } 
ephys_channel_entries = [ - {**key, "channel_idx": channel_idx, **channel_info} + { + **key, + "electrode_config_hash": econfig_entry["electrode_config_hash"], + "channel_idx": channel_idx, + **channel_info, + } for channel_idx, channel_info in channel2electrode_map.items() ] elif acq_software == "Open Ephys": @@ -438,24 +431,24 @@ def make(self, key): if probe_data.probe_model not in supported_probe_types: raise NotImplementedError( - f"Processing for neuropixels probe model {spikeglx_meta.probe_model} not yet implemented." + f"Processing for neuropixels probe model {probe_data.probe_model} not yet implemented." ) - else: - probe_type = probe_data.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } # electrode configuration + probe_type = probe_data.probe_model + electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - electrode_group_members = [ - probe_electrodes[channel_idx] - for channel_idx in probe_data.ap_meta["channels_indices"] - ] # recording session-specific electrode configuration + probe_electrodes = { + key["electrode"]: key for key in electrode_query.fetch("KEY") + } # electrode configuration - econfig_entry, econfig_electrodes = generate_electrode_config_entry( - probe_type, electrode_group_members - ) + electrode_group_members = [ + probe_electrodes[channel_idx] + for channel_idx in probe_data.ap_meta["channels_indices"] + ] # recording session-specific electrode configuration + + econfig_entry, econfig_electrodes = generate_electrode_config_entry( + probe_type, electrode_group_members + ) ephys_recording_entry = { **key, @@ -480,29 +473,24 @@ def make(self, key): for fp in probe_data.recording_info["recording_files"] ] - # Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough - del probe_data, dataset - gc.collect() - - probe_dataset = 
get_openephys_probe_data(key) - electrode_query = ( - probe.ProbeType.Electrode - * probe.ElectrodeConfig.Electrode - * EphysRecording - & key - ) - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } channel2electrode_map = { channel_idx: probe_electrodes[channel_idx] - for channel_idx in probe_dataset.ap_meta["channels_indices"] + for channel_idx in probe_data.ap_meta["channels_indices"] } ephys_channel_entries = [ - {**key, "channel_idx": channel_idx, **channel_info} + { + **key, + "electrode_config_hash": econfig_entry["electrode_config_hash"], + "channel_idx": channel_idx, + **channel_info, + } for channel_idx, channel_info in channel2electrode_map.items() ] + + # Explicitly garbage collect "dataset" as these may have large memory footprint and may not be cleared fast enough + del probe_data, dataset + gc.collect() else: raise NotImplementedError( f"Processing ephys files from acquisition software of type {acq_software} is not yet implemented." @@ -1041,10 +1029,7 @@ def make(self, key): output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) # Get channel and electrode-site mapping - electrode_query = ( - (EphysRecording.Channel & key) - .proj(..., "-channel_name") - ) + electrode_query = (EphysRecording.Channel & key).proj(..., "-channel_name") channel2electrode_map = electrode_query.fetch(as_dict=True) channel2electrode_map: dict[int, dict] = { chn.pop("channel_idx"): chn for chn in channel2electrode_map @@ -1058,7 +1043,9 @@ def make(self, key): if si_waveform_dir.exists(): # Read from spikeinterface outputs - we: si.WaveformExtractor = si.load_waveforms(si_waveform_dir, with_recording=False) + we: si.WaveformExtractor = si.load_waveforms( + si_waveform_dir, with_recording=False + ) si_sorting: si.sorters.BaseSorter = si.load_extractor( si_sorting_dir / "si_sorting.pkl" ) @@ -1139,10 +1126,10 @@ def make(self, key): "spike_count": spike_count_dict[unit_id], "spike_sites": new_spikes["electrode"][ 
new_spikes["unit_index"] == unit_id - ], + ], "spike_depths": new_spikes["depth"][ new_spikes["unit_index"] == unit_id - ], + ], } ) @@ -1281,10 +1268,7 @@ def make(self, key): sorter_name = clustering_method.replace(".", "_") # Get channel and electrode-site mapping - electrode_query = ( - (EphysRecording.Channel & key) - .proj(..., "-channel_name") - ) + electrode_query = (EphysRecording.Channel & key).proj(..., "-channel_name") channel2electrode_map = electrode_query.fetch(as_dict=True) channel2electrode_map: dict[int, dict] = { chn.pop("channel_idx"): chn for chn in channel2electrode_map @@ -1292,12 +1276,14 @@ def make(self, key): si_waveform_dir = output_dir / sorter_name / "waveform" if si_waveform_dir.exists(): # read from spikeinterface outputs - we: si.WaveformExtractor = si.load_waveforms(si_waveform_dir, with_recording=False) - unit_id_to_peak_channel_map: dict[ - int, np.ndarray - ] = si.ChannelSparsity.from_best_channels( - we, 1, peak_sign="neg" - ).unit_id_to_channel_indices # {unit: peak_channel_index} + we: si.WaveformExtractor = si.load_waveforms( + si_waveform_dir, with_recording=False + ) + unit_id_to_peak_channel_map: dict[int, np.ndarray] = ( + si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices + ) # {unit: peak_channel_index} # reorder channel2electrode_map according to recording channel ids channel2electrode_map = { @@ -1391,6 +1377,7 @@ def yield_unit_waveforms(): yield unit_peak_waveform, unit_electrode_waveforms # Spike interface mean and peak waveform extraction from we object + elif len(waveforms_folder) > 0 & (waveforms_folder[0]).exists(): we_kilosort = si.load_waveforms(waveforms_folder[0].parent) unit_templates = we_kilosort.get_all_templates() @@ -1618,7 +1605,10 @@ def make(self, key): sorter_name = clustering_method.replace(".", "_") # find metric_fp - for metric_fp in [output_dir / "metrics.csv", output_dir / sorter_name / "metrics" / "metrics.csv"]: + for metric_fp in [ + output_dir 
/ "metrics.csv", + output_dir / sorter_name / "metrics" / "metrics.csv", + ]: if metric_fp.exists(): break else: diff --git a/element_array_ephys/spike_sorting/si_preprocessing.py b/element_array_ephys/spike_sorting/si_preprocessing.py index 4db5f303..22adbdca 100644 --- a/element_array_ephys/spike_sorting/si_preprocessing.py +++ b/element_array_ephys/spike_sorting/si_preprocessing.py @@ -2,7 +2,7 @@ from spikeinterface import preprocessing -def catGT(recording): +def CatGT(recording): recording = si.preprocessing.phase_shift(recording) recording = si.preprocessing.common_reference( recording, operator="median", reference="global" diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index d14746fb..c1a906ea 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -127,7 +127,7 @@ def make(self, key): stream_names, stream_ids = si.extractors.get_neo_streams( acq_software, folder_path=data_dir ) - si_recording: si.BaseRecording = si_extractor[acq_software]( + si_recording: si.BaseRecording = si_extractor( folder_path=data_dir, stream_name=stream_names[0] ) @@ -184,7 +184,7 @@ def make(self, key): output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) sorter_name = clustering_method.replace(".", "_") recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" - si_recording: si.BaseRecording = si.load_extractor(recording_file) + si_recording: si.BaseRecording = si.load_extractor(recording_file, base_folder=output_dir) # Run sorting # Sorting performed in a dedicated docker environment if the sorter is not built in the spikeinterface package. 
@@ -241,8 +241,8 @@ def make(self, key): recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" - si_recording: si.BaseRecording = si.load_extractor(recording_file) - si_sorting: si.sorters.BaseSorter = si.load_extractor(sorting_file) + si_recording: si.BaseRecording = si.load_extractor(recording_file, base_folder=output_dir) + si_sorting: si.sorters.BaseSorter = si.load_extractor(sorting_file, base_folder=output_dir) # Extract waveforms we: si.WaveformExtractor = si.extract_waveforms( From 05ccfdb80cee7418e58322ebb3bbb9f4a1df6b8e Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Mon, 29 Apr 2024 11:46:32 -0500 Subject: [PATCH 116/204] fix: update ingestion from spikeinterface results --- element_array_ephys/ephys_no_curation.py | 137 +++++------------------ 1 file changed, 27 insertions(+), 110 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 333a189a..0cf2021c 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1040,18 +1040,16 @@ def make(self, key): si_waveform_dir = output_dir / sorter_name / "waveform" si_sorting_dir = output_dir / sorter_name / "spike_sorting" - if si_waveform_dir.exists(): - - # Read from spikeinterface outputs + if si_waveform_dir.exists(): # Read from spikeinterface outputs we: si.WaveformExtractor = si.load_waveforms( si_waveform_dir, with_recording=False ) si_sorting: si.sorters.BaseSorter = si.load_extractor( - si_sorting_dir / "si_sorting.pkl" + si_sorting_dir / "si_sorting.pkl", base_folder=output_dir ) unit_peak_channel: dict[int, int] = si.get_template_extremum_channel( - we, outputs="id" + we, outputs="index" ) # {unit: peak_channel_id} spike_count_dict: dict[int, int] = si_sorting.count_num_spikes_per_unit() @@ -1061,7 +1059,8 @@ def make(self, key): # reorder channel2electrode_map according to recording channel 
ids channel2electrode_map = { - chn_id: channel2electrode_map[int(chn_id)] for chn_id in we.channel_ids + chn_idx: channel2electrode_map[chn_idx] + for chn_idx in we.channel_ids_to_indices(we.channel_ids) } # Get unit id to quality label mapping @@ -1090,7 +1089,7 @@ def make(self, key): # Get channel depth channel_depth_ind = np.array( [ - channel2electrode_map[unit_peak_channel[unit_id]]["y_coord"] + we.get_probe().contact_positions[unit_peak_channel[unit_id]][1] for unit_id in si_sorting.unit_ids ] ) @@ -1132,7 +1131,6 @@ def make(self, key): ], } ) - else: # read from kilosort outputs kilosort_dataset = kilosort.Kilosort(output_dir) acq_software, sample_rate = (EphysRecording & key).fetch1( @@ -1286,46 +1284,38 @@ def make(self, key): ) # {unit: peak_channel_index} # reorder channel2electrode_map according to recording channel ids + channel_indices = we.channel_ids_to_indices(we.channel_ids).tolist() channel2electrode_map = { - chn_id: channel2electrode_map[int(chn_id)] for chn_id in we.channel_ids + chn_idx: channel2electrode_map[chn_idx] for chn_idx in channel_indices } - # Get mean waveform for each unit from all channels - mean_waveforms = we.get_all_templates( - mode="average" - ) # (unit x sample x channel) - - unit_peak_waveform = [] - unit_electrode_waveforms = [] - - for unit in (CuratedClustering.Unit & key).fetch("KEY", order_by="unit"): - unit_waveforms = we.get_template( - unit_id=unit["unit"], mode="average", force_dense=True - ) # (sample x channel) - peak_chn_idx = list(we.channel_ids).index( - unit_id_to_peak_channel_map[unit["unit"]][0] - ) - unit_peak_waveform.append( - { + def yield_unit_waveforms(): + for unit in (CuratedClustering.Unit & key).fetch( + "KEY", order_by="unit" + ): + # Get mean waveform for this unit from all channels - (sample x channel) + unit_waveforms = we.get_template( + unit_id=unit["unit"], mode="average", force_dense=True + ) + peak_chn_idx = channel_indices.index( + unit_id_to_peak_channel_map[unit["unit"]][0] + ) + 
unit_peak_waveform = { **unit, "peak_electrode_waveform": unit_waveforms[:, peak_chn_idx], } - ) - unit_electrode_waveforms.extend( - [ + + unit_electrode_waveforms = [ { **unit, - **channel2electrode_map[c], - "waveform_mean": mean_waveforms[unit["unit"] - 1, :, c_idx], + **channel2electrode_map[chn_idx], + "waveform_mean": unit_waveforms[:, chn_idx], } - for c_idx, c in enumerate(channel2electrode_map) + for chn_idx in channel_indices ] - ) - self.insert1(key) - self.PeakWaveform.insert(unit_peak_waveform) - self.Waveform.insert(unit_electrode_waveforms) - else: + yield unit_peak_waveform, unit_electrode_waveforms + else: # read from kilosort outputs kilosort_dataset = kilosort.Kilosort(output_dir) acq_software, probe_serial_number = ( @@ -1340,10 +1330,6 @@ def make(self, key): ) } - waveforms_folder = [ - f for f in output_dir.parent.rglob(r"*/waveforms*") if f.is_dir() - ] - if (output_dir / "mean_waveforms.npy").exists(): unit_waveforms = np.load( output_dir / "mean_waveforms.npy" @@ -1376,75 +1362,6 @@ def yield_unit_waveforms(): } yield unit_peak_waveform, unit_electrode_waveforms - # Spike interface mean and peak waveform extraction from we object - - elif len(waveforms_folder) > 0 & (waveforms_folder[0]).exists(): - we_kilosort = si.load_waveforms(waveforms_folder[0].parent) - unit_templates = we_kilosort.get_all_templates() - unit_waveforms = np.reshape( - unit_templates, - ( - unit_templates.shape[1], - unit_templates.shape[3], - unit_templates.shape[2], - ), - ) - - # Approach assumes unit_waveforms was generated correctly (templates are actually the same as mean_waveforms) - def yield_unit_waveforms(): - for unit_no, unit_waveform in zip( - kilosort_dataset.data["cluster_ids"], unit_waveforms - ): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - if unit_no in units: - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], unit_waveform - ): - unit_electrode_waveforms.append( - { - **units[unit_no], - 
**channel2electrode_map[channel], - "waveform_mean": channel_waveform, - } - ) - if ( - channel2electrode_map[channel]["electrode"] - == units[unit_no]["electrode"] - ): - unit_peak_waveform = { - **units[unit_no], - "peak_electrode_waveform": channel_waveform, - } - yield unit_peak_waveform, unit_electrode_waveforms - - # Approach not using spike interface templates (ie. taking mean of each unit waveform) - # def yield_unit_waveforms(): - # for unit_id in we_kilosort.unit_ids: - # unit_waveform = np.mean(we_kilosort.get_waveforms(unit_id), 0) - # unit_peak_waveform = {} - # unit_electrode_waveforms = [] - # if unit_id in units: - # for channel, channel_waveform in zip( - # kilosort_dataset.data["channel_map"], unit_waveform - # ): - # unit_electrode_waveforms.append( - # { - # **units[unit_id], - # **channel2electrodes[channel], - # "waveform_mean": channel_waveform, - # } - # ) - # if ( - # channel2electrodes[channel]["electrode"] - # == units[unit_id]["electrode"] - # ): - # unit_peak_waveform = { - # **units[unit_id], - # "peak_electrode_waveform": channel_waveform, - # } - # yield unit_peak_waveform, unit_electrode_waveforms - else: if acq_software == "SpikeGLX": spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) From 93895a965471902b3a3aa5448c7648ce09432928 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Wed, 8 May 2024 00:23:28 +0200 Subject: [PATCH 117/204] Refactor Quality Metrics Logic + blackformatting --- element_array_ephys/ephys_no_curation.py | 22 +++-- .../spike_sorting/si_spike_sorting.py | 84 +++++++++++++------ 2 files changed, 73 insertions(+), 33 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 0cf2021c..b0a8bc26 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1277,11 +1277,11 @@ def make(self, key): we: si.WaveformExtractor = si.load_waveforms( si_waveform_dir, with_recording=False ) - 
unit_id_to_peak_channel_map: dict[int, np.ndarray] = ( - si.ChannelSparsity.from_best_channels( - we, 1, peak_sign="neg" - ).unit_id_to_channel_indices - ) # {unit: peak_channel_index} + unit_id_to_peak_channel_map: dict[ + int, np.ndarray + ] = si.ChannelSparsity.from_best_channels( + we, 1, peak_sign="neg" + ).unit_id_to_channel_indices # {unit: peak_channel_index} # reorder channel2electrode_map according to recording channel ids channel_indices = we.channel_ids_to_indices(we.channel_ids).tolist() @@ -1315,6 +1315,7 @@ def yield_unit_waveforms(): ] yield unit_peak_waveform, unit_electrode_waveforms + else: # read from kilosort outputs kilosort_dataset = kilosort.Kilosort(output_dir) @@ -1546,9 +1547,14 @@ def make(self, key): metrics_df.rename( columns={ - "isi_viol": "isi_violation", - "num_viol": "number_violation", - "contam_rate": "contamination_rate", + "isi_violations_ratio": "isi_violation", + "isi_violations_count": "number_violation", + "silhouette": "silhouette_score", + "rp_contamination": "contamination_rate", + "drift_ptp": "max_drift", + "drift_mad": "cumulative_drift", + "half_width": "halfwidth", + "peak_trough_ratio": "pt_ratio", }, inplace=True, ) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index c1a906ea..94f12f84 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -134,7 +134,9 @@ def make(self, key): # Add probe information to recording object electrodes_df = ( ( - ephys.EphysRecording.Channel * probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode + ephys.EphysRecording.Channel + * probe.ElectrodeConfig.Electrode + * probe.ProbeType.Electrode & key ) .fetch(format="frame") @@ -142,7 +144,9 @@ def make(self, key): ) # Create SI probe object - si_probe = readers.probe_geometry.to_probeinterface(electrodes_df[["electrode", "x_coord", "y_coord", "shank"]]) + si_probe = 
readers.probe_geometry.to_probeinterface( + electrodes_df[["electrode", "x_coord", "y_coord", "shank"]] + ) si_probe.set_device_channel_indices(electrodes_df["channel_idx"].values) si_recording.set_probe(probe=si_probe, in_place=True) @@ -184,7 +188,9 @@ def make(self, key): output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) sorter_name = clustering_method.replace(".", "_") recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" - si_recording: si.BaseRecording = si.load_extractor(recording_file, base_folder=output_dir) + si_recording: si.BaseRecording = si.load_extractor( + recording_file, base_folder=output_dir + ) # Run sorting # Sorting performed in a dedicated docker environment if the sorter is not built in the spikeinterface package. @@ -241,8 +247,12 @@ def make(self, key): recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" - si_recording: si.BaseRecording = si.load_extractor(recording_file, base_folder=output_dir) - si_sorting: si.sorters.BaseSorter = si.load_extractor(sorting_file, base_folder=output_dir) + si_recording: si.BaseRecording = si.load_extractor( + recording_file, base_folder=output_dir + ) + si_sorting: si.sorters.BaseSorter = si.load_extractor( + sorting_file, base_folder=output_dir + ) # Extract waveforms we: si.WaveformExtractor = si.extract_waveforms( @@ -257,27 +267,46 @@ def make(self, key): **params.get("SI_JOB_KWARGS", {"n_jobs": -1, "chunk_size": 30000}), ) - # Calculate QC Metrics - metrics: pd.DataFrame = si.qualitymetrics.compute_quality_metrics( - we, - metric_names=[ - "firing_rate", - "snr", - "presence_ratio", - "isi_violation", - "num_spikes", - "amplitude_cutoff", - "amplitude_median", - "sliding_rp_violation", - "rp_violation", - "drift", - ], - ) - # Add PCA based metrics. These will be added to the metrics dataframe above. 
+ # Calculate Cluster and Waveform Metrics + + # To provide waveform_principal_component _ = si.postprocessing.compute_principal_components( waveform_extractor=we, **params.get("SI_QUALITY_METRICS_PARAMS", None) ) - metrics = si.qualitymetrics.compute_quality_metrics(waveform_extractor=we) + + # To estimate the location of each spike in the sorting output. + # The drift metrics require the `spike_locations` waveform extension. + _ = si.postprocessing.compute_spike_locations(waveform_extractor=we) + + # The `sd_ratio` metric requires the `spike_amplitudes` waveform extension. + # It is highly recommended before calculating amplitude-based quality metrics. + _ = si.postprocessing.compute_spike_amplitudes(waveform_extractor=we) + + # To compute correlograms for spike trains. + _ = si.postprocessing.compute_correlograms(we) + + metric_names = si.qualitymetrics.get_quality_metric_list() + metric_names.extend(si.qualitymetrics.get_quality_pca_metric_list()) + + # To compute commonly used cluster quality metrics. + qc_metrics = si.qualitymetrics.compute_quality_metrics( + waveform_extractor=we, + metric_names=metric_names, + ) + + # To compute commonly used waveform/template metrics. 
+ template_metric_names = si.postprocessing.get_template_metric_names() + template_metric_names.extend(["amplitude", "duration"]) + + template_metrics = si.postprocessing.compute_template_metrics( + waveform_extractor=we, + include_multi_channel_metrics=True, + metric_names=template_metric_names, + ) + + # Save the output (metrics.csv to the output dir) + metrics = pd.DataFrame() + metrics = pd.concat([qc_metrics, template_metrics], axis=1) # Save the output (metrics.csv to the output dir) metrics_output_dir = output_dir / sorter_name / "metrics" @@ -285,9 +314,14 @@ def make(self, key): metrics.to_csv(metrics_output_dir / "metrics.csv") # Save to phy format - si.exporters.export_to_phy(waveform_extractor=we, output_folder=output_dir / sorter_name / "phy") + si.exporters.export_to_phy( + waveform_extractor=we, output_folder=output_dir / sorter_name / "phy" + ) # Generate spike interface report - si.exporters.export_report(waveform_extractor=we, output_folder=output_dir / sorter_name / "spikeinterface_report") + si.exporters.export_report( + waveform_extractor=we, + output_folder=output_dir / sorter_name / "spikeinterface_report", + ) self.insert1( { From bd3bb8e9eccb7df3f44fce7398549325f994dec8 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 8 May 2024 10:34:09 -0500 Subject: [PATCH 118/204] Update si_spike_sorting.py --- element_array_ephys/spike_sorting/si_spike_sorting.py | 1 + 1 file changed, 1 insertion(+) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index c1a906ea..1aea4ad0 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -42,6 +42,7 @@ def activate( create_tables=create_tables, add_objects=ephys.__dict__, ) + ephys.Clustering.key_source -= PreProcessing.key_source.proj() SI_SORTERS = [s.replace("_", ".") for s in si.sorters.sorter_dict.keys()] From 403d1df30c18eb63f84b200ea8a861c59d9d6ac5 Mon 
Sep 17 00:00:00 2001 From: MilagrosMarin Date: Thu, 9 May 2024 18:57:31 +0200 Subject: [PATCH 119/204] update `postprocessing` logic --- element_array_ephys/spike_sorting/si_spike_sorting.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 94f12f84..4c90337e 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -286,7 +286,7 @@ def make(self, key): _ = si.postprocessing.compute_correlograms(we) metric_names = si.qualitymetrics.get_quality_metric_list() - metric_names.extend(si.qualitymetrics.get_quality_pca_metric_list()) + metric_names.extend(si.qualitymetrics.get_quality_pca_metric_list()) # To compute commonly used cluster quality metrics. qc_metrics = si.qualitymetrics.compute_quality_metrics( @@ -308,7 +308,7 @@ def make(self, key): metrics = pd.DataFrame() metrics = pd.concat([qc_metrics, template_metrics], axis=1) - # Save the output (metrics.csv to the output dir) + # Save metrics.csv to the output dir metrics_output_dir = output_dir / sorter_name / "metrics" metrics_output_dir.mkdir(parents=True, exist_ok=True) metrics.to_csv(metrics_output_dir / "metrics.csv") From c934e67ea6e5de2e30b35dbc10ab547e49917159 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 24 May 2024 11:00:06 -0500 Subject: [PATCH 120/204] feat: prototyping with the new `sorting_analyzer` --- .../spike_sorting/si_spike_sorting.py | 27 +++++++++++++++++-- setup.py | 2 +- 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index ab803490..f7cb1e57 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -255,6 +255,29 @@ def make(self, key): sorting_file, base_folder=output_dir ) + # 
Sorting Analyzer + analyzer_output_dir = output_dir / sorter_name / "sorting_analyzer" + if analyzer_output_dir.exists(): + sorting_analyzer = si.load_sorting_analyzer(folder=analyzer_output_dir) + else: + sorting_analyzer = si.create_sorting_analyzer( + sorting=si_sorting, + recording=si_recording, + format="binary_folder", + folder=analyzer_output_dir, + sparse=True, + overwrite=True, + ) + + job_kwargs = params.get("SI_JOB_KWARGS", {"n_jobs": -1, "chunk_duration": "1s"}) + all_computable_extensions = ['random_spikes', 'waveforms', 'templates', 'noise_levels', 'amplitude_scalings', 'correlograms', 'isi_histograms', 'principal_components', 'spike_amplitudes', 'spike_locations', 'template_metrics', 'template_similarity', 'unit_locations', 'quality_metrics'] + extensions_to_compute = ['random_spikes', 'waveforms', 'templates', 'noise_levels', + 'spike_amplitudes', 'spike_locations', 'unit_locations', + 'principal_components', + 'template_metrics', 'quality_metrics'] + + sorting_analyzer.compute(extensions_to_compute, **job_kwargs) + # Extract waveforms we: si.WaveformExtractor = si.extract_waveforms( si_recording, @@ -287,7 +310,7 @@ def make(self, key): _ = si.postprocessing.compute_correlograms(we) metric_names = si.qualitymetrics.get_quality_metric_list() - metric_names.extend(si.qualitymetrics.get_quality_pca_metric_list()) + # metric_names.extend(si.qualitymetrics.get_quality_pca_metric_list()) # TODO: temporarily removed # To compute commonly used cluster quality metrics. qc_metrics = si.qualitymetrics.compute_quality_metrics( @@ -297,7 +320,7 @@ def make(self, key): # To compute commonly used waveform/template metrics. template_metric_names = si.postprocessing.get_template_metric_names() - template_metric_names.extend(["amplitude", "duration"]) + template_metric_names.extend(["amplitude", "duration"]) # TODO: does this do anything? 
template_metrics = si.postprocessing.compute_template_metrics( waveform_extractor=we, diff --git a/setup.py b/setup.py index 52cd38b1..e62719d8 100644 --- a/setup.py +++ b/setup.py @@ -35,7 +35,7 @@ "openpyxl", "plotly", "seaborn", - "spikeinterface", + "spikeinterface>=0.101.0", "scikit-image", "nbformat>=4.2.0", "pyopenephys>=1.1.6", From 3666cda077448cc40d7b7e9c219c9c489396cbd6 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 24 May 2024 14:34:05 -0500 Subject: [PATCH 121/204] feat: update ingestion to be compatible with spikeinterface 0.101+ --- element_array_ephys/ephys_no_curation.py | 209 ++++++++---------- .../spike_sorting/si_spike_sorting.py | 93 ++------ 2 files changed, 116 insertions(+), 186 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index b0a8bc26..413868da 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1037,98 +1037,69 @@ def make(self, key): # Get sorter method and create output directory. 
sorter_name = clustering_method.replace(".", "_") - si_waveform_dir = output_dir / sorter_name / "waveform" - si_sorting_dir = output_dir / sorter_name / "spike_sorting" + si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" - if si_waveform_dir.exists(): # Read from spikeinterface outputs - we: si.WaveformExtractor = si.load_waveforms( - si_waveform_dir, with_recording=False - ) - si_sorting: si.sorters.BaseSorter = si.load_extractor( - si_sorting_dir / "si_sorting.pkl", base_folder=output_dir - ) + if si_sorting_analyzer_dir.exists(): # Read from spikeinterface outputs + sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) + si_sorting = sorting_analyzer.sorting - unit_peak_channel: dict[int, int] = si.get_template_extremum_channel( - we, outputs="index" - ) # {unit: peak_channel_id} + # Find representative channel for each unit + unit_peak_channel: dict[int, np.ndarray] = ( + si.ChannelSparsity.from_best_channels( + sorting_analyzer, 1, peak_sign="neg" + ).unit_id_to_channel_indices + ) # {unit: peak_channel_index} + unit_peak_channel = {u: chn[0] for u, chn in unit_peak_channel.items()} spike_count_dict: dict[int, int] = si_sorting.count_num_spikes_per_unit() # {unit: spike_count} - spikes = si_sorting.to_spike_vector() - # reorder channel2electrode_map according to recording channel ids channel2electrode_map = { chn_idx: channel2electrode_map[chn_idx] - for chn_idx in we.channel_ids_to_indices(we.channel_ids) + for chn_idx in sorting_analyzer.channel_ids_to_indices( + sorting_analyzer.channel_ids + ) } # Get unit id to quality label mapping - try: - cluster_quality_label_map = pd.read_csv( - si_sorting_dir / "sorter_output" / "cluster_KSLabel.tsv", - delimiter="\t", + cluster_quality_label_map = { + int(unit_id): ( + si_sorting.get_unit_property(unit_id, "KSLabel") + if "KSLabel" in si_sorting.get_property_keys() + else "n.a." 
) - except FileNotFoundError: - cluster_quality_label_map = {} - else: - cluster_quality_label_map: dict[ - int, str - ] = cluster_quality_label_map.set_index("cluster_id")[ - "KSLabel" - ].to_dict() # {unit: quality_label} - - # Get electrode where peak unit activity is recorded - peak_electrode_ind = np.array( - [ - channel2electrode_map[unit_peak_channel[unit_id]]["electrode"] - for unit_id in si_sorting.unit_ids - ] - ) - - # Get channel depth - channel_depth_ind = np.array( - [ - we.get_probe().contact_positions[unit_peak_channel[unit_id]][1] - for unit_id in si_sorting.unit_ids - ] - ) - - # Assign electrode and depth for each spike - new_spikes = np.empty( - spikes.shape, - spikes.dtype.descr + [("electrode", " Date: Fri, 24 May 2024 14:52:45 -0500 Subject: [PATCH 122/204] format: black formatting --- element_array_ephys/ephys_no_curation.py | 10 +++++++--- .../spike_sorting/si_spike_sorting.py | 19 ++++++++++++------- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 413868da..99247e35 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1256,7 +1256,9 @@ def make(self, key): unit_peak_channel = {u: chn[0] for u, chn in unit_peak_channel.items()} # reorder channel2electrode_map according to recording channel ids - channel_indices = sorting_analyzer.channel_ids_to_indices(sorting_analyzer.channel_ids).tolist() + channel_indices = sorting_analyzer.channel_ids_to_indices( + sorting_analyzer.channel_ids + ).tolist() channel2electrode_map = { chn_idx: channel2electrode_map[chn_idx] for chn_idx in channel_indices } @@ -1500,7 +1502,9 @@ def make(self, key): if si_sorting_analyzer_dir.exists(): # read from spikeinterface outputs sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) qc_metrics = sorting_analyzer.get_extension("quality_metrics").get_data() - template_metrics = 
sorting_analyzer.get_extension("template_metrics").get_data() + template_metrics = sorting_analyzer.get_extension( + "template_metrics" + ).get_data() metrics_df = pd.concat([qc_metrics, template_metrics], axis=1) metrics_df.rename( @@ -1514,7 +1518,7 @@ def make(self, key): "drift_mad": "cumulative_drift", "half_width": "halfwidth", "peak_trough_ratio": "pt_ratio", - "peak_to_valley": "duration" + "peak_to_valley": "duration", }, inplace=True, ) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 55c6efdd..33201d86 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -270,28 +270,33 @@ def make(self, key): overwrite=True, ) - job_kwargs = params["SI_POSTPROCESSING_PARAMS"].get("job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"}) + job_kwargs = params["SI_POSTPROCESSING_PARAMS"].get( + "job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"} + ) extensions_params = params["SI_POSTPROCESSING_PARAMS"].get("extensions", {}) # The order of extension computation is drawn from sorting_analyzer.get_computable_extensions() # each extension is parameterized by params specified in extensions_params dictionary (skip if not specified) - extensions_to_compute = {ext_name: extensions_params[ext_name] - for ext_name in sorting_analyzer.get_computable_extensions() - if ext_name in extensions_params} + extensions_to_compute = { + ext_name: extensions_params[ext_name] + for ext_name in sorting_analyzer.get_computable_extensions() + if ext_name in extensions_params + } sorting_analyzer.compute(extensions_to_compute, **job_kwargs) # Save to phy format if params["SI_POSTPROCESSING_PARAMS"].get("export_to_phy", False): si.exporters.export_to_phy( - sorting_analyzer=sorting_analyzer, output_folder=output_dir / sorter_name / "phy", - **job_kwargs + sorting_analyzer=sorting_analyzer, + output_folder=output_dir / sorter_name / "phy", + 
**job_kwargs, ) # Generate spike interface report if params["SI_POSTPROCESSING_PARAMS"].get("export_report", True): si.exporters.export_report( sorting_analyzer=sorting_analyzer, output_folder=output_dir / sorter_name / "spikeinterface_report", - **job_kwargs + **job_kwargs, ) self.insert1( From 07a09f6152b9632ce713287a85dedd0ad1bf8e9b Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 24 May 2024 15:28:52 -0500 Subject: [PATCH 123/204] chore: code clean up --- .../spike_sorting/si_spike_sorting.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 33201d86..a0ff2035 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -80,11 +80,9 @@ def make(self, key): sorter_name = clustering_method.replace(".", "_") for required_key in ( - "SI_SORTING_PARAMS", "SI_PREPROCESSING_METHOD", + "SI_SORTING_PARAMS", "SI_POSTPROCESSING_PARAMS", - "SI_WAVEFORM_EXTRACTION_PARAMS", - "SI_QUALITY_METRICS_PARAMS", ): if required_key not in params: raise ValueError( @@ -256,6 +254,10 @@ def make(self, key): sorting_file, base_folder=output_dir ) + job_kwargs = params["SI_POSTPROCESSING_PARAMS"].get( + "job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"} + ) + # Sorting Analyzer analyzer_output_dir = output_dir / sorter_name / "sorting_analyzer" if (analyzer_output_dir / "extensions").exists(): @@ -268,14 +270,12 @@ def make(self, key): folder=analyzer_output_dir, sparse=True, overwrite=True, + **job_kwargs ) - job_kwargs = params["SI_POSTPROCESSING_PARAMS"].get( - "job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"} - ) - extensions_params = params["SI_POSTPROCESSING_PARAMS"].get("extensions", {}) # The order of extension computation is drawn from sorting_analyzer.get_computable_extensions() # each extension is parameterized by params specified in extensions_params 
dictionary (skip if not specified) + extensions_params = params["SI_POSTPROCESSING_PARAMS"].get("extensions", {}) extensions_to_compute = { ext_name: extensions_params[ext_name] for ext_name in sorting_analyzer.get_computable_extensions() From 3fcf542d1435f4f891f2bbf93eaa3668da1986ea Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 24 May 2024 15:29:09 -0500 Subject: [PATCH 124/204] update: update requirements to install `SpikeInterface` from github (latest version) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e62719d8..f1ba9c90 100644 --- a/setup.py +++ b/setup.py @@ -35,7 +35,7 @@ "openpyxl", "plotly", "seaborn", - "spikeinterface>=0.101.0", + "spikeinterface @ git+https://github.com/SpikeInterface/spikeinterface.git", "scikit-image", "nbformat>=4.2.0", "pyopenephys>=1.1.6", From 76dfc94568bf28296da18905d0b187588bc99397 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 May 2024 10:32:19 -0500 Subject: [PATCH 125/204] fix: minor bug in spikes ingestion --- element_array_ephys/ephys_no_curation.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 99247e35..9222ccd2 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1048,8 +1048,8 @@ def make(self, key): si.ChannelSparsity.from_best_channels( sorting_analyzer, 1, peak_sign="neg" ).unit_id_to_channel_indices - ) # {unit: peak_channel_index} - unit_peak_channel = {u: chn[0] for u, chn in unit_peak_channel.items()} + ) + unit_peak_channel: dict[int, int] = {u: chn[0] for u, chn in unit_peak_channel.items()} spike_count_dict: dict[int, int] = si_sorting.count_num_spikes_per_unit() # {unit: spike_count} @@ -1076,9 +1076,9 @@ def make(self, key): spikes_df = pd.DataFrame(spike_locations.spikes) units = [] - for unit_id in si_sorting.unit_ids: + for unit_idx, unit_id in 
enumerate(si_sorting.unit_ids): unit_id = int(unit_id) - unit_spikes_df = spikes_df[spikes_df.unit_index == unit_id] + unit_spikes_df = spikes_df[spikes_df.unit_index == unit_idx] spike_sites = np.array( [ channel2electrode_map[chn_idx]["electrode"] @@ -1087,6 +1087,9 @@ def make(self, key): ) unit_spikes_loc = spike_locations.get_data()[unit_spikes_df.index] _, spike_depths = zip(*unit_spikes_loc) # x-coordinates, y-coordinates + spike_times = si_sorting.get_unit_spike_train(unit_id, return_times=True) + + assert len(spike_times) == len(spike_sites) == len(spike_depths) units.append( { @@ -1094,9 +1097,7 @@ def make(self, key): **channel2electrode_map[unit_peak_channel[unit_id]], "unit": unit_id, "cluster_quality_label": cluster_quality_label_map[unit_id], - "spike_times": si_sorting.get_unit_spike_train( - unit_id, return_times=True - ), + "spike_times": spike_times, "spike_count": spike_count_dict[unit_id], "spike_sites": spike_sites, "spike_depths": spike_depths, From 9094754b6f23bd65a71390094ac509e06d22b34c Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 May 2024 10:38:59 -0500 Subject: [PATCH 126/204] update: bump version --- CHANGELOG.md | 5 +++++ element_array_ephys/version.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5e45e427..5d81dcba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,11 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
+## [0.4.0] - 2024-05-28 + ++ Add - support for SpikeInterface version >= 0.101.0 (updated API) + + ## [0.3.4] - 2024-03-22 + Add - pytest diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 148bac24..2e6de55a 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.3.4" +__version__ = "0.4.0" From 51e2ced3f36fa1b69bacf69ea1fbf295c84eaf16 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 May 2024 13:14:00 -0500 Subject: [PATCH 127/204] feat: add `memoized_result` on spike sorting --- CHANGELOG.md | 1 + .../spike_sorting/si_spike_sorting.py | 103 ++++++++++-------- setup.py | 2 +- 3 files changed, 60 insertions(+), 46 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d81dcba..cd8bb5b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,7 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and ## [0.4.0] - 2024-05-28 + Add - support for SpikeInterface version >= 0.101.0 (updated API) ++ Add - feature for memoization of spike sorting results (prevent duplicated runs) ## [0.3.4] - 2024-03-22 diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index a0ff2035..dff74dd7 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -8,7 +8,7 @@ import pandas as pd import spikeinterface as si from element_array_ephys import probe, readers -from element_interface.utils import find_full_path +from element_interface.utils import find_full_path, memoized_result from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from . 
import si_preprocessing @@ -192,23 +192,29 @@ def make(self, key): recording_file, base_folder=output_dir ) + sorting_params = params["SI_SORTING_PARAMS"] + sorting_output_dir = output_dir / sorter_name / "spike_sorting" + # Run sorting - # Sorting performed in a dedicated docker environment if the sorter is not built in the spikeinterface package. - si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( - sorter_name=sorter_name, - recording=si_recording, - output_folder=output_dir / sorter_name / "spike_sorting", - remove_existing_folder=True, - verbose=True, - docker_image=sorter_name not in si.sorters.installed_sorters(), - **params.get("SI_SORTING_PARAMS", {}), + @memoized_result( + uniqueness_dict=sorting_params, + output_directory=sorting_output_dir, ) + def _run_sorter(): + # Sorting performed in a dedicated docker environment if the sorter is not built in the spikeinterface package. + si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( + sorter_name=sorter_name, + recording=si_recording, + output_folder=sorting_output_dir, + remove_existing_folder=True, + verbose=True, + docker_image=sorter_name not in si.sorters.installed_sorters(), + **sorting_params, + ) - # Save sorting object - sorting_save_path = ( - output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" - ) - si_sorting.dump_to_pickle(sorting_save_path, relative_to=output_dir) + # Save sorting object + sorting_save_path = sorting_output_dir / "si_sorting.pkl" + si_sorting.dump_to_pickle(sorting_save_path, relative_to=output_dir) self.insert1( { @@ -254,15 +260,20 @@ def make(self, key): sorting_file, base_folder=output_dir ) - job_kwargs = params["SI_POSTPROCESSING_PARAMS"].get( + postprocessing_params = params["SI_POSTPROCESSING_PARAMS"] + + job_kwargs = postprocessing_params.get( "job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"} ) - # Sorting Analyzer analyzer_output_dir = output_dir / sorter_name / "sorting_analyzer" - if (analyzer_output_dir / "extensions").exists(): - 
sorting_analyzer = si.load_sorting_analyzer(folder=analyzer_output_dir) - else: + + @memoized_result( + uniqueness_dict=postprocessing_params, + output_directory=analyzer_output_dir, + ) + def _sorting_analyzer_compute(): + # Sorting Analyzer sorting_analyzer = si.create_sorting_analyzer( sorting=si_sorting, recording=si_recording, @@ -273,31 +284,33 @@ def make(self, key): **job_kwargs ) - # The order of extension computation is drawn from sorting_analyzer.get_computable_extensions() - # each extension is parameterized by params specified in extensions_params dictionary (skip if not specified) - extensions_params = params["SI_POSTPROCESSING_PARAMS"].get("extensions", {}) - extensions_to_compute = { - ext_name: extensions_params[ext_name] - for ext_name in sorting_analyzer.get_computable_extensions() - if ext_name in extensions_params - } - - sorting_analyzer.compute(extensions_to_compute, **job_kwargs) - - # Save to phy format - if params["SI_POSTPROCESSING_PARAMS"].get("export_to_phy", False): - si.exporters.export_to_phy( - sorting_analyzer=sorting_analyzer, - output_folder=output_dir / sorter_name / "phy", - **job_kwargs, - ) - # Generate spike interface report - if params["SI_POSTPROCESSING_PARAMS"].get("export_report", True): - si.exporters.export_report( - sorting_analyzer=sorting_analyzer, - output_folder=output_dir / sorter_name / "spikeinterface_report", - **job_kwargs, - ) + # The order of extension computation is drawn from sorting_analyzer.get_computable_extensions() + # each extension is parameterized by params specified in extensions_params dictionary (skip if not specified) + extensions_params = postprocessing_params.get("extensions", {}) + extensions_to_compute = { + ext_name: extensions_params[ext_name] + for ext_name in sorting_analyzer.get_computable_extensions() + if ext_name in extensions_params + } + + sorting_analyzer.compute(extensions_to_compute, **job_kwargs) + + # Save to phy format + if postprocessing_params.get("export_to_phy", False): 
+ si.exporters.export_to_phy( + sorting_analyzer=sorting_analyzer, + output_folder=analyzer_output_dir / "phy", + **job_kwargs, + ) + # Generate spike interface report + if postprocessing_params.get("export_report", True): + si.exporters.export_report( + sorting_analyzer=sorting_analyzer, + output_folder=analyzer_output_dir / "spikeinterface_report", + **job_kwargs, + ) + + _sorting_analyzer_compute() self.insert1( { diff --git a/setup.py b/setup.py index f1ba9c90..66789740 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ "scikit-image", "nbformat>=4.2.0", "pyopenephys>=1.1.6", - "element-interface @ git+https://github.com/datajoint/element-interface.git", + "element-interface @ git+https://github.com/datajoint/element-interface.git@dev_memoized_results", "numba", ], extras_require={ From 0afb4529de262fbee6b21461e5aec58765fd0e12 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 May 2024 14:22:20 -0500 Subject: [PATCH 128/204] chore: minor code cleanup --- element_array_ephys/ephys_no_curation.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 9222ccd2..b49d4422 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -8,14 +8,12 @@ import datajoint as dj import numpy as np import pandas as pd -import spikeinterface as si from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory -from spikeinterface import exporters, postprocessing, qualitymetrics, sorters from . 
import ephys_report, probe from .readers import kilosort, openephys, spikeglx -log = dj.logger +logger = dj.logger schema = dj.schema() @@ -824,7 +822,7 @@ def infer_output_dir(cls, key, relative: bool = False, mkdir: bool = False): if mkdir: output_dir.mkdir(parents=True, exist_ok=True) - log.info(f"{output_dir} created!") + logger.info(f"{output_dir} created!") return output_dir.relative_to(processed_dir) if relative else output_dir @@ -1040,6 +1038,8 @@ def make(self, key): si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" if si_sorting_analyzer_dir.exists(): # Read from spikeinterface outputs + import spikeinterface as si + sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) si_sorting = sorting_analyzer.sorting @@ -1246,6 +1246,8 @@ def make(self, key): si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" if si_sorting_analyzer_dir.exists(): # read from spikeinterface outputs + import spikeinterface as si + sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) # Find representative channel for each unit @@ -1501,6 +1503,8 @@ def make(self, key): si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" if si_sorting_analyzer_dir.exists(): # read from spikeinterface outputs + import spikeinterface as si + sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) qc_metrics = sorting_analyzer.get_extension("quality_metrics").get_data() template_metrics = sorting_analyzer.get_extension( From e8f445c3b4b532b3159638e71d231e2048939a90 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 May 2024 16:47:22 -0500 Subject: [PATCH 129/204] fix: merge fix & formatting --- element_array_ephys/spike_sorting/si_spike_sorting.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index dff74dd7..9e14f636 100644 --- 
a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -248,7 +248,6 @@ def make(self, key): ).fetch1("clustering_method", "clustering_output_dir", "params") output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) sorter_name = clustering_method.replace(".", "_") - output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) recording_file = output_dir / sorter_name / "recording" / "si_recording.pkl" sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" @@ -281,7 +280,7 @@ def _sorting_analyzer_compute(): folder=analyzer_output_dir, sparse=True, overwrite=True, - **job_kwargs + **job_kwargs, ) # The order of extension computation is drawn from sorting_analyzer.get_computable_extensions() From 6155f13fd755ac76ec79fdd1594b0e96ef8d550b Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 May 2024 17:01:10 -0500 Subject: [PATCH 130/204] fix: calling `_run_sorter()` --- element_array_ephys/spike_sorting/si_spike_sorting.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 9e14f636..5c1d6567 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -216,6 +216,8 @@ def _run_sorter(): sorting_save_path = sorting_output_dir / "si_sorting.pkl" si_sorting.dump_to_pickle(sorting_save_path, relative_to=output_dir) + _run_sorter() + self.insert1( { **key, From f6a52d9d3f31b7ebe2853da4545551898cfa50ae Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 May 2024 20:07:27 -0500 Subject: [PATCH 131/204] chore: more robust channel mapping --- element_array_ephys/ephys_no_curation.py | 29 ++++++++---------------- 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 
b49d4422..142f350b 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1028,9 +1028,8 @@ def make(self, key): # Get channel and electrode-site mapping electrode_query = (EphysRecording.Channel & key).proj(..., "-channel_name") - channel2electrode_map = electrode_query.fetch(as_dict=True) channel2electrode_map: dict[int, dict] = { - chn.pop("channel_idx"): chn for chn in channel2electrode_map + chn.pop("channel_idx"): chn for chn in electrode_query.fetch(as_dict=True) } # Get sorter method and create output directory. @@ -1054,12 +1053,10 @@ def make(self, key): spike_count_dict: dict[int, int] = si_sorting.count_num_spikes_per_unit() # {unit: spike_count} - # reorder channel2electrode_map according to recording channel ids + # update channel2electrode_map to match with probe's channel index channel2electrode_map = { - chn_idx: channel2electrode_map[chn_idx] - for chn_idx in sorting_analyzer.channel_ids_to_indices( - sorting_analyzer.channel_ids - ) + idx: channel2electrode_map[int(chn_idx)] + for idx, chn_idx in enumerate(sorting_analyzer.get_probe().contact_ids) } # Get unit id to quality label mapping @@ -1239,9 +1236,8 @@ def make(self, key): # Get channel and electrode-site mapping electrode_query = (EphysRecording.Channel & key).proj(..., "-channel_name") - channel2electrode_map = electrode_query.fetch(as_dict=True) channel2electrode_map: dict[int, dict] = { - chn.pop("channel_idx"): chn for chn in channel2electrode_map + chn.pop("channel_idx"): chn for chn in electrode_query.fetch(as_dict=True) } si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" @@ -1258,12 +1254,10 @@ def make(self, key): ) # {unit: peak_channel_index} unit_peak_channel = {u: chn[0] for u, chn in unit_peak_channel.items()} - # reorder channel2electrode_map according to recording channel ids - channel_indices = sorting_analyzer.channel_ids_to_indices( - sorting_analyzer.channel_ids - ).tolist() + # update 
channel2electrode_map to match with probe's channel index channel2electrode_map = { - chn_idx: channel2electrode_map[chn_idx] for chn_idx in channel_indices + idx: channel2electrode_map[int(chn_idx)] + for idx, chn_idx in enumerate(sorting_analyzer.get_probe().contact_ids) } templates = sorting_analyzer.get_extension("templates") @@ -1276,12 +1270,9 @@ def yield_unit_waveforms(): unit_waveforms = templates.get_unit_template( unit_id=unit["unit"], operator="average" ) - peak_chn_idx = channel_indices.index( - unit_peak_channel[unit["unit"]] - ) unit_peak_waveform = { **unit, - "peak_electrode_waveform": unit_waveforms[:, peak_chn_idx], + "peak_electrode_waveform": unit_waveforms[:, unit_peak_channel[unit["unit"]]], } unit_electrode_waveforms = [ @@ -1290,7 +1281,7 @@ def yield_unit_waveforms(): **channel2electrode_map[chn_idx], "waveform_mean": unit_waveforms[:, chn_idx], } - for chn_idx in channel_indices + for chn_idx in channel2electrode_map ] yield unit_peak_waveform, unit_electrode_waveforms From 1ff92dd15db6ff9e8458f53ec96fdffb6b93305d Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 29 May 2024 16:09:16 -0500 Subject: [PATCH 132/204] fix: use relative path for phy output --- element_array_ephys/spike_sorting/si_spike_sorting.py | 1 + 1 file changed, 1 insertion(+) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 5c1d6567..93619303 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -301,6 +301,7 @@ def _sorting_analyzer_compute(): si.exporters.export_to_phy( sorting_analyzer=sorting_analyzer, output_folder=analyzer_output_dir / "phy", + use_relative_path=True, **job_kwargs, ) # Generate spike interface report From b45970974df001319a4ebae182bf291313f5e39a Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 29 May 2024 16:16:21 -0500 Subject: [PATCH 133/204] feat: in data ingestion, set 
peak_sign="both" --- element_array_ephys/ephys_no_curation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 142f350b..8eadba49 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1045,7 +1045,7 @@ def make(self, key): # Find representative channel for each unit unit_peak_channel: dict[int, np.ndarray] = ( si.ChannelSparsity.from_best_channels( - sorting_analyzer, 1, peak_sign="neg" + sorting_analyzer, 1, peak_sign="both" ).unit_id_to_channel_indices ) unit_peak_channel: dict[int, int] = {u: chn[0] for u, chn in unit_peak_channel.items()} @@ -1249,7 +1249,7 @@ def make(self, key): # Find representative channel for each unit unit_peak_channel: dict[int, np.ndarray] = ( si.ChannelSparsity.from_best_channels( - sorting_analyzer, 1, peak_sign="neg" + sorting_analyzer, 1, peak_sign="both" ).unit_id_to_channel_indices ) # {unit: peak_channel_index} unit_peak_channel = {u: chn[0] for u, chn in unit_peak_channel.items()} From 1a1b18f8a52b83298bffc8d82555ccc147151dd1 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Mon, 3 Jun 2024 13:22:49 -0500 Subject: [PATCH 134/204] feat: replace `output_folder` with `folder` when calling `run_sorter`, use default value for `peak_sign` --- element_array_ephys/ephys_no_curation.py | 21 ++++++++++++------- .../spike_sorting/si_spike_sorting.py | 2 +- 2 files changed, 15 insertions(+), 8 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 8eadba49..891cee0f 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1045,10 +1045,13 @@ def make(self, key): # Find representative channel for each unit unit_peak_channel: dict[int, np.ndarray] = ( si.ChannelSparsity.from_best_channels( - sorting_analyzer, 1, peak_sign="both" + sorting_analyzer, + 1, 
).unit_id_to_channel_indices ) - unit_peak_channel: dict[int, int] = {u: chn[0] for u, chn in unit_peak_channel.items()} + unit_peak_channel: dict[int, int] = { + u: chn[0] for u, chn in unit_peak_channel.items() + } spike_count_dict: dict[int, int] = si_sorting.count_num_spikes_per_unit() # {unit: spike_count} @@ -1084,7 +1087,9 @@ def make(self, key): ) unit_spikes_loc = spike_locations.get_data()[unit_spikes_df.index] _, spike_depths = zip(*unit_spikes_loc) # x-coordinates, y-coordinates - spike_times = si_sorting.get_unit_spike_train(unit_id, return_times=True) + spike_times = si_sorting.get_unit_spike_train( + unit_id, return_times=True + ) assert len(spike_times) == len(spike_sites) == len(spike_depths) @@ -1243,13 +1248,13 @@ def make(self, key): si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" if si_sorting_analyzer_dir.exists(): # read from spikeinterface outputs import spikeinterface as si - + sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) # Find representative channel for each unit unit_peak_channel: dict[int, np.ndarray] = ( si.ChannelSparsity.from_best_channels( - sorting_analyzer, 1, peak_sign="both" + sorting_analyzer, 1 ).unit_id_to_channel_indices ) # {unit: peak_channel_index} unit_peak_channel = {u: chn[0] for u, chn in unit_peak_channel.items()} @@ -1272,7 +1277,9 @@ def yield_unit_waveforms(): ) unit_peak_waveform = { **unit, - "peak_electrode_waveform": unit_waveforms[:, unit_peak_channel[unit["unit"]]], + "peak_electrode_waveform": unit_waveforms[ + :, unit_peak_channel[unit["unit"]] + ], } unit_electrode_waveforms = [ @@ -1495,7 +1502,7 @@ def make(self, key): si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" if si_sorting_analyzer_dir.exists(): # read from spikeinterface outputs import spikeinterface as si - + sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) qc_metrics = sorting_analyzer.get_extension("quality_metrics").get_data() 
template_metrics = sorting_analyzer.get_extension( diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 93619303..57aa0ba1 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -205,7 +205,7 @@ def _run_sorter(): si_sorting: si.sorters.BaseSorter = si.sorters.run_sorter( sorter_name=sorter_name, recording=si_recording, - output_folder=sorting_output_dir, + folder=sorting_output_dir, remove_existing_folder=True, verbose=True, docker_image=sorter_name not in si.sorters.installed_sorters(), From 4e645ebd9b83f5e607e1d18188c0c3ce5f84eb4a Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 5 Jun 2024 16:15:25 -0500 Subject: [PATCH 135/204] fix: remove `job_kwargs` for sparsity calculation - memory error in linux container --- element_array_ephys/spike_sorting/si_spike_sorting.py | 1 - 1 file changed, 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 57aa0ba1..b93d9c10 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -282,7 +282,6 @@ def _sorting_analyzer_compute(): folder=analyzer_output_dir, sparse=True, overwrite=True, - **job_kwargs, ) # The order of extension computation is drawn from sorting_analyzer.get_computable_extensions() From 38fdfb2a5fd44f1115aa4f1660482e1639eaa3c2 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 5 Jul 2024 10:59:37 -0500 Subject: [PATCH 136/204] feat: separate `export` (phy and report) into a separate table --- .../spike_sorting/si_spike_sorting.py | 94 +++++++++++++++---- 1 file changed, 78 insertions(+), 16 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index b93d9c10..463af3df 100644 --- 
a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -239,6 +239,7 @@ class PostProcessing(dj.Imported): --- execution_time: datetime # datetime of the start of this step execution_duration: float # execution duration in hours + do_si_export=1: bool # whether to export to phy """ def make(self, key): @@ -295,22 +296,6 @@ def _sorting_analyzer_compute(): sorting_analyzer.compute(extensions_to_compute, **job_kwargs) - # Save to phy format - if postprocessing_params.get("export_to_phy", False): - si.exporters.export_to_phy( - sorting_analyzer=sorting_analyzer, - output_folder=analyzer_output_dir / "phy", - use_relative_path=True, - **job_kwargs, - ) - # Generate spike interface report - if postprocessing_params.get("export_report", True): - si.exporters.export_report( - sorting_analyzer=sorting_analyzer, - output_folder=analyzer_output_dir / "spikeinterface_report", - **job_kwargs, - ) - _sorting_analyzer_compute() self.insert1( @@ -321,6 +306,8 @@ def _sorting_analyzer_compute(): datetime.utcnow() - execution_time ).total_seconds() / 3600, + "do_si_export": postprocessing_params.get("export_to_phy", False) + or postprocessing_params.get("export_report", False), } ) @@ -328,3 +315,78 @@ def _sorting_analyzer_compute(): ephys.Clustering.insert1( {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True ) + + +@schema +class SIExport(dj.Computed): + """A SpikeInterface export report and to Phy""" + + definition = """ + -> PostProcessing + --- + execution_time: datetime + execution_duration: float + """ + + @property + def key_source(self): + return PostProcessing & "do_si_export = 1" + + def make(self, key): + execution_time = datetime.utcnow() + + clustering_method, output_dir, params = ( + ephys.ClusteringTask * ephys.ClusteringParamSet & key + ).fetch1("clustering_method", "clustering_output_dir", "params") + output_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) + 
sorter_name = clustering_method.replace(".", "_") + + postprocessing_params = params["SI_POSTPROCESSING_PARAMS"] + + job_kwargs = postprocessing_params.get( + "job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"} + ) + + analyzer_output_dir = output_dir / sorter_name / "sorting_analyzer" + sorting_analyzer = si.load_sorting_analyzer(folder=analyzer_output_dir) + + @memoized_result( + uniqueness_dict=postprocessing_params, + output_directory=analyzer_output_dir / "phy", + ) + def _export_to_phy(): + # Save to phy format + si.exporters.export_to_phy( + sorting_analyzer=sorting_analyzer, + output_folder=analyzer_output_dir / "phy", + use_relative_path=True, + **job_kwargs, + ) + + @memoized_result( + uniqueness_dict=postprocessing_params, + output_directory=analyzer_output_dir / "spikeinterface_report", + ) + def _export_report(): + # Generate spike interface report + si.exporters.export_report( + sorting_analyzer=sorting_analyzer, + output_folder=analyzer_output_dir / "spikeinterface_report", + **job_kwargs, + ) + + if postprocessing_params.get("export_report", False): + _export_report() + if postprocessing_params.get("export_to_phy", False): + _export_to_phy() + + self.insert1( + { + **key, + "execution_time": execution_time, + "execution_duration": ( + datetime.utcnow() - execution_time + ).total_seconds() + / 3600, + } + ) From a4a8380405673bf2c85861223afa0c9e5e481296 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 5 Jul 2024 11:00:42 -0500 Subject: [PATCH 137/204] fix: export default to `False` --- element_array_ephys/spike_sorting/si_spike_sorting.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 463af3df..6f2d7b53 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -239,7 +239,7 @@ class PostProcessing(dj.Imported): --- execution_time: 
datetime # datetime of the start of this step execution_duration: float # execution duration in hours - do_si_export=1: bool # whether to export to phy + do_si_export=0: bool # whether to export to phy """ def make(self, key): @@ -331,7 +331,7 @@ class SIExport(dj.Computed): @property def key_source(self): return PostProcessing & "do_si_export = 1" - + def make(self, key): execution_time = datetime.utcnow() From 1f05998e25d848b6aeb73231fa90e616580cd1d8 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 5 Jul 2024 16:45:40 -0500 Subject: [PATCH 138/204] fix: `spikes` object no longer available from `ComputeSpikeLocations` (https://github.com/SpikeInterface/spikeinterface/pull/3015) --- element_array_ephys/ephys_no_curation.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index 891cee0f..5df8bad0 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1073,7 +1073,9 @@ def make(self, key): } spike_locations = sorting_analyzer.get_extension("spike_locations") - spikes_df = pd.DataFrame(spike_locations.spikes) + extremum_channel_inds = si.template_tools.get_template_extremum_channel(sorting_analyzer, outputs="index") + spikes_df = pd.DataFrame( + sorting_analyzer.sorting.to_spike_vector(extremum_channel_inds=extremum_channel_inds)) units = [] for unit_idx, unit_id in enumerate(si_sorting.unit_ids): From 7cd8ac8ce8eeb731f149924279bb3b0d990caa45 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 5 Jul 2024 21:26:48 -0500 Subject: [PATCH 139/204] chore: code cleanup --- element_array_ephys/spike_sorting/si_spike_sorting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 6f2d7b53..8624e073 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ 
b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -9,7 +9,7 @@ import spikeinterface as si from element_array_ephys import probe, readers from element_interface.utils import find_full_path, memoized_result -from spikeinterface import exporters, postprocessing, qualitymetrics, sorters +from spikeinterface import exporters, extractors, sorters from . import si_preprocessing From c87e49332f90386acc8eb696e65f87bfd7b6ae24 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Sat, 6 Jul 2024 08:00:44 -0500 Subject: [PATCH 140/204] fix: recording_extractor_full_dict is deprecated (https://github.com/SpikeInterface/spikeinterface/pull/3153) --- .../spike_sorting/si_spike_sorting.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 8624e073..7133b81c 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -111,25 +111,30 @@ def make(self, key): ) spikeglx_recording.validate_file("ap") data_dir = spikeglx_meta_filepath.parent + + si_extractor = si.extractors.neoextractors.spikeglx.SpikeGLXRecordingExtractor + stream_names, stream_ids = si.extractors.get_neo_streams( + acq_software, folder_path=data_dir + ) + si_recording: si.BaseRecording = si_extractor( + folder_path=data_dir, stream_name=stream_names[0] + ) elif acq_software == "Open Ephys": oe_probe = ephys.get_openephys_probe_data(key) assert len(oe_probe.recording_info["recording_files"]) == 1 data_dir = oe_probe.recording_info["recording_files"][0] + si_extractor = si.extractors.neoextractors.openephys.OpenEphysBinaryRecordingExtractor + + stream_names, stream_ids = si.extractors.get_neo_streams( + acq_software, folder_path=data_dir + ) + si_recording: si.BaseRecording = si_extractor( + folder_path=data_dir, stream_name=stream_names[0] + ) else: raise NotImplementedError( f"SpikeInterface 
processing for {acq_software} not yet implemented." ) - acq_software = acq_software.replace(" ", "").lower() - si_extractor: si.extractors.neoextractors = ( - si.extractors.extractorlist.recording_extractor_full_dict[acq_software] - ) # data extractor object - - stream_names, stream_ids = si.extractors.get_neo_streams( - acq_software, folder_path=data_dir - ) - si_recording: si.BaseRecording = si_extractor( - folder_path=data_dir, stream_name=stream_names[0] - ) # Add probe information to recording object electrodes_df = ( From 097d9bbf7694e40a839b4cebb49890d1acd325f1 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 30 Jul 2024 18:26:37 -0500 Subject: [PATCH 141/204] fix: bugfix spikeinterface extractor name --- element_array_ephys/spike_sorting/si_spike_sorting.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 7133b81c..550ae4a1 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -114,7 +114,7 @@ def make(self, key): si_extractor = si.extractors.neoextractors.spikeglx.SpikeGLXRecordingExtractor stream_names, stream_ids = si.extractors.get_neo_streams( - acq_software, folder_path=data_dir + "spikeglx", folder_path=data_dir ) si_recording: si.BaseRecording = si_extractor( folder_path=data_dir, stream_name=stream_names[0] @@ -126,7 +126,7 @@ def make(self, key): si_extractor = si.extractors.neoextractors.openephys.OpenEphysBinaryRecordingExtractor stream_names, stream_ids = si.extractors.get_neo_streams( - acq_software, folder_path=data_dir + "openephysbinary", folder_path=data_dir ) si_recording: si.BaseRecording = si_extractor( folder_path=data_dir, stream_name=stream_names[0] From b6f131b814ed9dba2e2cc38d6918df52668dd590 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 15 Aug 2024 17:24:09 -0500 Subject: [PATCH 142/204] update: 
element-interface `main` branch --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 66789740..f1ba9c90 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,7 @@ "scikit-image", "nbformat>=4.2.0", "pyopenephys>=1.1.6", - "element-interface @ git+https://github.com/datajoint/element-interface.git@dev_memoized_results", + "element-interface @ git+https://github.com/datajoint/element-interface.git", "numba", ], extras_require={ From ccd23fc413d126f897c20cececcc35b86cb5190f Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 10 Sep 2024 12:04:48 -0500 Subject: [PATCH 143/204] rearrange(all): major refactor of modules --- CHANGELOG.md | 9 + element_array_ephys/__init__.py | 4 +- .../{ephys_no_curation.py => ephys.py} | 13 +- element_array_ephys/ephys_acute.py | 1594 ----------------- element_array_ephys/ephys_chronic.py | 1523 ---------------- element_array_ephys/ephys_precluster.py | 1435 --------------- element_array_ephys/ephys_report.py | 14 +- element_array_ephys/export/nwb/nwb.py | 9 +- .../spike_sorting/si_spike_sorting.py | 28 +- element_array_ephys/version.py | 2 +- tests/tutorial_pipeline.py | 6 +- 11 files changed, 43 insertions(+), 4594 deletions(-) rename element_array_ephys/{ephys_no_curation.py => ephys.py} (99%) delete mode 100644 element_array_ephys/ephys_acute.py delete mode 100644 element_array_ephys/ephys_chronic.py delete mode 100644 element_array_ephys/ephys_precluster.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 7068216b..34d1a2e4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,15 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. + +## [1.0.0] - 2024-09-10 + ++ Update - No longer support multiple variation of ephys module, keep only `ephys_no_curation` module, renamed to `ephys` ++ Update - Remove other ephys modules (e.g. 
`ephys_acute`, `ephys_chronic`) (moved to different branches) ++ Update - Add support for `SpikeInterface` ++ Update - Remove support for `ecephys_spike_sorting` (moved to a different branch) ++ Update - Simplify the "activate" mechanism + ## [0.4.0] - 2024-08-16 + Add - support for SpikeInterface version >= 0.101.0 (updated API) diff --git a/element_array_ephys/__init__.py b/element_array_ephys/__init__.py index 1c0c7285..079950b4 100644 --- a/element_array_ephys/__init__.py +++ b/element_array_ephys/__init__.py @@ -1 +1,3 @@ -from . import ephys_acute as ephys +from . import ephys + +ephys_no_curation = ephys # alias for backward compatibility diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys.py similarity index 99% rename from element_array_ephys/ephys_no_curation.py rename to element_array_ephys/ephys.py index 5df8bad0..3025d289 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys.py @@ -10,7 +10,7 @@ import pandas as pd from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory -from . import ephys_report, probe +from . import probe from .readers import kilosort, openephys, spikeglx logger = dj.logger @@ -22,7 +22,6 @@ def activate( ephys_schema_name: str, - probe_schema_name: str = None, *, create_schema: bool = True, create_tables: bool = True, @@ -32,7 +31,6 @@ def activate( Args: ephys_schema_name (str): A string containing the name of the ephys schema. - probe_schema_name (str): A string containing the name of the probe schema. create_schema (bool): If True, schema will be created in the database. create_tables (bool): If True, tables related to the schema will be created in the database. linking_module (str): A string containing the module name or module containing the required dependencies to activate the schema. 
@@ -46,7 +44,6 @@ def activate( get_ephys_root_data_dir(): Returns absolute path for root data director(y/ies) with all electrophysiological recording sessions, as a list of string(s). get_session_direction(session_key: dict): Returns path to electrophysiology data for the a particular session as a list of strings. get_processed_data_dir(): Optional. Returns absolute path for processed data. Defaults to root directory. - """ if isinstance(linking_module, str): @@ -58,17 +55,15 @@ def activate( global _linking_module _linking_module = linking_module - # activate - probe.activate( - probe_schema_name, create_schema=create_schema, create_tables=create_tables - ) + if not probe.schema.is_activated(): + raise RuntimeError("Please activate the `probe` schema first.") + schema.activate( ephys_schema_name, create_schema=create_schema, create_tables=create_tables, add_objects=_linking_module.__dict__, ) - ephys_report.activate(f"{ephys_schema_name}_report", ephys_schema_name) # -------------- Functions required by the elements-ephys --------------- diff --git a/element_array_ephys/ephys_acute.py b/element_array_ephys/ephys_acute.py deleted file mode 100644 index c2627fc9..00000000 --- a/element_array_ephys/ephys_acute.py +++ /dev/null @@ -1,1594 +0,0 @@ -import gc -import importlib -import inspect -import pathlib -import re -from decimal import Decimal - -import datajoint as dj -import numpy as np -import pandas as pd -from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory - -from . import ephys_report, probe -from .readers import kilosort, openephys, spikeglx - -log = dj.logger - -schema = dj.schema() - -_linking_module = None - - -def activate( - ephys_schema_name: str, - probe_schema_name: str = None, - *, - create_schema: bool = True, - create_tables: bool = True, - linking_module: str = None, -): - """Activates the `ephys` and `probe` schemas. - - Args: - ephys_schema_name (str): A string containing the name of the ephys schema. 
- probe_schema_name (str): A string containing the name of the probe schema. - create_schema (bool): If True, schema will be created in the database. - create_tables (bool): If True, tables related to the schema will be created in the database. - linking_module (str): A string containing the module name or module containing the required dependencies to activate the schema. - - Dependencies: - Upstream tables: - Session: A parent table to ProbeInsertion - Probe: A parent table to EphysRecording. Probe information is required before electrophysiology data is imported. - - Functions: - get_ephys_root_data_dir(): Returns absolute path for root data director(y/ies) with all electrophysiological recording sessions, as a list of string(s). - get_session_direction(session_key: dict): Returns path to electrophysiology data for the a particular session as a list of strings. - get_processed_data_dir(): Optional. Returns absolute path for processed data. Defaults to root directory. - """ - - if isinstance(linking_module, str): - linking_module = importlib.import_module(linking_module) - assert inspect.ismodule( - linking_module - ), "The argument 'dependency' must be a module's name or a module" - - global _linking_module - _linking_module = linking_module - - probe.activate( - probe_schema_name, create_schema=create_schema, create_tables=create_tables - ) - schema.activate( - ephys_schema_name, - create_schema=create_schema, - create_tables=create_tables, - add_objects=_linking_module.__dict__, - ) - ephys_report.activate(f"{ephys_schema_name}_report", ephys_schema_name) - - -# -------------- Functions required by the elements-ephys --------------- - - -def get_ephys_root_data_dir() -> list: - """Fetches absolute data path to ephys data directories. - - The absolute path here is used as a reference for all downstream relative paths used in DataJoint. - - Returns: - A list of the absolute path(s) to ephys data directories. 
- """ - root_directories = _linking_module.get_ephys_root_data_dir() - if isinstance(root_directories, (str, pathlib.Path)): - root_directories = [root_directories] - - if hasattr(_linking_module, "get_processed_root_data_dir"): - root_directories.append(_linking_module.get_processed_root_data_dir()) - - return root_directories - - -def get_session_directory(session_key: dict) -> str: - """Retrieve the session directory with Neuropixels for the given session. - - Args: - session_key (dict): A dictionary mapping subject to an entry in the subject table, and session_datetime corresponding to a session in the database. - - Returns: - A string for the path to the session directory. - """ - return _linking_module.get_session_directory(session_key) - - -def get_processed_root_data_dir() -> str: - """Retrieve the root directory for all processed data. - - Returns: - A string for the full path to the root directory for processed data. - """ - - if hasattr(_linking_module, "get_processed_root_data_dir"): - return _linking_module.get_processed_root_data_dir() - else: - return get_ephys_root_data_dir()[0] - - -# ----------------------------- Table declarations ---------------------- - - -@schema -class AcquisitionSoftware(dj.Lookup): - """Name of software used for recording electrophysiological data. - - Attributes: - acq_software ( varchar(24) ): Acquisition software, e.g,. SpikeGLX, OpenEphys - """ - - definition = """ # Software used for recording of neuropixels probes - acq_software: varchar(24) - """ - contents = zip(["SpikeGLX", "Open Ephys"]) - - -@schema -class ProbeInsertion(dj.Manual): - """Information about probe insertion across subjects and sessions. - - Attributes: - Session (foreign key): Session primary key. - insertion_number (foreign key, str): Unique insertion number for each probe insertion for a given session. - probe.Probe (str): probe.Probe primary key. - """ - - definition = """ - # Probe insertion implanted into an animal for a given session. 
- -> Session - insertion_number: tinyint unsigned - --- - -> probe.Probe - """ - - @classmethod - def auto_generate_entries(cls, session_key): - """Automatically populate entries in ProbeInsertion table for a session.""" - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(session_key) - ) - # search session dir and determine acquisition software - for ephys_pattern, ephys_acq_type in ( - ("*.ap.meta", "SpikeGLX"), - ("*.oebin", "Open Ephys"), - ): - ephys_meta_filepaths = list(session_dir.rglob(ephys_pattern)) - if ephys_meta_filepaths: - acq_software = ephys_acq_type - break - else: - raise FileNotFoundError( - f"Ephys recording data not found!" - f" Neither SpikeGLX nor Open Ephys recording files found in: {session_dir}" - ) - - probe_list, probe_insertion_list = [], [] - if acq_software == "SpikeGLX": - for meta_fp_idx, meta_filepath in enumerate(ephys_meta_filepaths): - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - - probe_key = { - "probe_type": spikeglx_meta.probe_model, - "probe": spikeglx_meta.probe_SN, - } - if probe_key["probe"] not in [p["probe"] for p in probe_list]: - probe_list.append(probe_key) - - probe_dir = meta_filepath.parent - try: - probe_number = re.search("(imec)?\d{1}$", probe_dir.name).group() - probe_number = int(probe_number.replace("imec", "")) - except AttributeError: - probe_number = meta_fp_idx - - probe_insertion_list.append( - { - **session_key, - "probe": spikeglx_meta.probe_SN, - "insertion_number": int(probe_number), - } - ) - elif acq_software == "Open Ephys": - loaded_oe = openephys.OpenEphys(session_dir) - for probe_idx, oe_probe in enumerate(loaded_oe.probes.values()): - probe_key = { - "probe_type": oe_probe.probe_model, - "probe": oe_probe.probe_SN, - } - if probe_key["probe"] not in [p["probe"] for p in probe_list]: - probe_list.append(probe_key) - probe_insertion_list.append( - { - **session_key, - "probe": oe_probe.probe_SN, - "insertion_number": probe_idx, - } - ) - else: - raise 
NotImplementedError(f"Unknown acquisition software: {acq_software}") - - probe.Probe.insert(probe_list, skip_duplicates=True) - cls.insert(probe_insertion_list, skip_duplicates=True) - - -@schema -class InsertionLocation(dj.Manual): - """Stereotaxic location information for each probe insertion. - - Attributes: - ProbeInsertion (foreign key): ProbeInsertion primary key. - SkullReference (dict): SkullReference primary key. - ap_location (decimal (6, 2) ): Anterior-posterior location in micrometers. Reference is 0 with anterior values positive. - ml_location (decimal (6, 2) ): Medial-lateral location in micrometers. Reference is zero with right side values positive. - depth (decimal (6, 2) ): Manipulator depth relative to the surface of the brain at zero. Ventral is negative. - Theta (decimal (5, 2) ): elevation - rotation about the ml-axis in degrees relative to positive z-axis. - phi (decimal (5, 2) ): azimuth - rotation about the dv-axis in degrees relative to the positive x-axis. - """ - - definition = """ - # Brain Location of a given probe insertion. - -> ProbeInsertion - --- - -> SkullReference - ap_location: decimal(6, 2) # (um) anterior-posterior; ref is 0; more anterior is more positive - ml_location: decimal(6, 2) # (um) medial axis; ref is 0 ; more right is more positive - depth: decimal(6, 2) # (um) manipulator depth relative to surface of the brain (0); more ventral is more negative - theta=null: decimal(5, 2) # (deg) - elevation - rotation about the ml-axis [0, 180] - w.r.t the z+ axis - phi=null: decimal(5, 2) # (deg) - azimuth - rotation about the dv-axis [0, 360] - w.r.t the x+ axis - beta=null: decimal(5, 2) # (deg) rotation about the shank of the probe [-180, 180] - clockwise is increasing in degree - 0 is the probe-front facing anterior - """ - - -@schema -class EphysRecording(dj.Imported): - """Automated table with electrophysiology recording information for each probe inserted during an experimental session. 
- - Attributes: - ProbeInsertion (foreign key): ProbeInsertion primary key. - probe.ElectrodeConfig (dict): probe.ElectrodeConfig primary key. - AcquisitionSoftware (dict): AcquisitionSoftware primary key. - sampling_rate (float): sampling rate of the recording in Hertz (Hz). - recording_datetime (datetime): datetime of the recording from this probe. - recording_duration (float): duration of the entire recording from this probe in seconds. - """ - - definition = """ - # Ephys recording from a probe insertion for a given session. - -> ProbeInsertion - --- - -> probe.ElectrodeConfig - -> AcquisitionSoftware - sampling_rate: float # (Hz) - recording_datetime: datetime # datetime of the recording from this probe - recording_duration: float # (seconds) duration of the recording from this probe - """ - - class EphysFile(dj.Part): - """Paths of electrophysiology recording files for each insertion. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - file_path (varchar(255) ): relative file path for electrophysiology recording. - """ - - definition = """ - # Paths of files of a given EphysRecording round. - -> master - file_path: varchar(255) # filepath relative to root data directory - """ - - def make(self, key): - """Populates table with electrophysiology recording information.""" - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - - inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1( - "probe" - ) - - # search session dir and determine acquisition software - for ephys_pattern, ephys_acq_type in ( - ("*.ap.meta", "SpikeGLX"), - ("*.oebin", "Open Ephys"), - ): - ephys_meta_filepaths = list(session_dir.rglob(ephys_pattern)) - if ephys_meta_filepaths: - acq_software = ephys_acq_type - break - else: - raise FileNotFoundError( - f"Ephys recording data not found!" 
- f" Neither SpikeGLX nor Open Ephys recording files found" - f" in {session_dir}" - ) - - supported_probe_types = probe.ProbeType.fetch("probe_type") - - if acq_software == "SpikeGLX": - for meta_filepath in ephys_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format(key) - ) - - if spikeglx_meta.probe_model in supported_probe_types: - probe_type = spikeglx_meta.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - electrode_group_members = [ - probe_electrodes[(shank, shank_col, shank_row)] - for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels probe model" - " {} not yet implemented".format(spikeglx_meta.probe_model) - ) - - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": spikeglx_meta.meta["imSampRate"], - "recording_datetime": spikeglx_meta.recording_time, - "recording_duration": ( - spikeglx_meta.recording_duration - or spikeglx.retrieve_recording_duration(meta_filepath) - ), - } - ) - - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) - self.EphysFile.insert1( - {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} - ) - elif acq_software == "Open Ephys": - dataset = openephys.OpenEphys(session_dir) - for serial_number, probe_data in dataset.probes.items(): - if str(serial_number) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No Open Ephys data found for probe insertion: {}".format(key) - ) - - if not 
probe_data.ap_meta: - raise IOError( - 'No analog signals found - check "structure.oebin" file or "continuous" directory' - ) - - if probe_data.probe_model in supported_probe_types: - probe_type = probe_data.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - electrode_group_members = [ - probe_electrodes[channel_idx] - for channel_idx in probe_data.ap_meta["channels_indices"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels" - " probe model {} not yet implemented".format(probe_data.probe_model) - ) - - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": probe_data.ap_meta["sample_rate"], - "recording_datetime": probe_data.recording_info[ - "recording_datetimes" - ][0], - "recording_duration": np.sum( - probe_data.recording_info["recording_durations"] - ), - } - ) - - root_dir = find_root_directory( - get_ephys_root_data_dir(), - probe_data.recording_info["recording_files"][0], - ) - self.EphysFile.insert( - [ - {**key, "file_path": fp.relative_to(root_dir).as_posix()} - for fp in probe_data.recording_info["recording_files"] - ] - ) - # explicitly garbage collect "dataset" - # as these may have large memory footprint and may not be cleared fast enough - del probe_data, dataset - gc.collect() - else: - raise NotImplementedError( - f"Processing ephys files from" - f" acquisition software of type {acq_software} is" - f" not yet implemented" - ) - - -@schema -class LFP(dj.Imported): - """Extracts local field potentials (LFP) from an electrophysiology recording. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - lfp_sampling_rate (float): Sampling rate for LFPs in Hz. - lfp_time_stamps (longblob): Time stamps with respect to the start of the recording. 
- lfp_mean (longblob): Overall mean LFP across electrodes. - """ - - definition = """ - # Acquired local field potential (LFP) from a given Ephys recording. - -> EphysRecording - --- - lfp_sampling_rate: float # (Hz) - lfp_time_stamps: longblob # (s) timestamps with respect to the start of the recording (recording_timestamp) - lfp_mean: longblob # (uV) mean of LFP across electrodes - shape (time,) - """ - - class Electrode(dj.Part): - """Saves local field potential data for each electrode. - - Attributes: - LFP (foreign key): LFP primary key. - probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key. - lfp (longblob): LFP recording at this electrode in microvolts. - """ - - definition = """ - -> master - -> probe.ElectrodeConfig.Electrode - --- - lfp: longblob # (uV) recorded lfp at this electrode - """ - - # Only store LFP for every 9th channel, due to high channel density, - # close-by channels exhibit highly similar LFP - _skip_channel_counts = 9 - - def make(self, key): - """Populates the LFP tables.""" - acq_software = (EphysRecording * ProbeInsertion & key).fetch1("acq_software") - - electrode_keys, lfp = [], [] - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - - lfp_channel_ind = spikeglx_recording.lfmeta.recording_channels[ - -1 :: -self._skip_channel_counts - ] - - # Extract LFP data at specified channels and convert to uV - lfp = spikeglx_recording.lf_timeseries[ - :, lfp_channel_ind - ] # (sample x channel) - lfp = ( - lfp * spikeglx_recording.get_channel_bit_volts("lf")[lfp_channel_ind] - ).T # (channel x sample) - - self.insert1( - dict( - key, - lfp_sampling_rate=spikeglx_recording.lfmeta.meta["imSampRate"], - lfp_time_stamps=( - np.arange(lfp.shape[1]) - / spikeglx_recording.lfmeta.meta["imSampRate"] - ), - lfp_mean=lfp.mean(axis=0), - ) - ) - - electrode_query = ( - probe.ProbeType.Electrode - * 
probe.ElectrodeConfig.Electrode - * EphysRecording - & key - ) - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - for recorded_site in lfp_channel_ind: - shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap[ - "data" - ][recorded_site] - electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)]) - elif acq_software == "Open Ephys": - oe_probe = get_openephys_probe_data(key) - - lfp_channel_ind = np.r_[ - len(oe_probe.lfp_meta["channels_indices"]) - - 1 : 0 : -self._skip_channel_counts - ] - - # (sample x channel) - lfp = oe_probe.lfp_timeseries[:, lfp_channel_ind] - lfp = ( - lfp * np.array(oe_probe.lfp_meta["channels_gains"])[lfp_channel_ind] - ).T # (channel x sample) - lfp_timestamps = oe_probe.lfp_timestamps - - self.insert1( - dict( - key, - lfp_sampling_rate=oe_probe.lfp_meta["sample_rate"], - lfp_time_stamps=lfp_timestamps, - lfp_mean=lfp.mean(axis=0), - ) - ) - - electrode_query = ( - probe.ProbeType.Electrode - * probe.ElectrodeConfig.Electrode - * EphysRecording - & key - ) - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - electrode_keys.extend( - probe_electrodes[channel_idx] for channel_idx in lfp_channel_ind - ) - else: - raise NotImplementedError( - f"LFP extraction from acquisition software" - f" of type {acq_software} is not yet implemented" - ) - - # single insert in loop to mitigate potential memory issue - for electrode_key, lfp_trace in zip(electrode_keys, lfp): - self.Electrode.insert1({**key, **electrode_key, "lfp": lfp_trace}) - - -# ------------ Clustering -------------- - - -@schema -class ClusteringMethod(dj.Lookup): - """Kilosort clustering method. - - Attributes: - clustering_method (foreign key, varchar(16) ): Kilosort clustering method. - clustering_methods_desc (varchar(1000) ): Additional description of the clustering method. 
- """ - - definition = """ - # Method for clustering - clustering_method: varchar(16) - --- - clustering_method_desc: varchar(1000) - """ - - contents = [ - ("kilosort2", "kilosort2 clustering method"), - ("kilosort2.5", "kilosort2.5 clustering method"), - ("kilosort3", "kilosort3 clustering method"), - ] - - -@schema -class ClusteringParamSet(dj.Lookup): - """Parameters to be used in clustering procedure for spike sorting. - - Attributes: - paramset_idx (foreign key): Unique ID for the clustering parameter set. - ClusteringMethod (dict): ClusteringMethod primary key. - paramset_desc (varchar(128) ): Description of the clustering parameter set. - param_set_hash (uuid): UUID hash for the parameter set. - params (longblob): Parameters for clustering with Kilosort. - """ - - definition = """ - # Parameter set to be used in a clustering procedure - paramset_idx: smallint - --- - -> ClusteringMethod - paramset_desc: varchar(128) - param_set_hash: uuid - unique index (param_set_hash) - params: longblob # dictionary of all applicable parameters - """ - - @classmethod - def insert_new_params( - cls, - clustering_method: str, - paramset_desc: str, - params: dict, - paramset_idx: int = None, - ): - """Inserts new parameters into the ClusteringParamSet table. - - Args: - clustering_method (str): name of the clustering method. - paramset_desc (str): description of the parameter set - params (dict): clustering parameters - paramset_idx (int, optional): Unique parameter set ID. Defaults to None. 
- """ - if paramset_idx is None: - paramset_idx = ( - dj.U().aggr(cls, n="max(paramset_idx)").fetch1("n") or 0 - ) + 1 - - param_dict = { - "clustering_method": clustering_method, - "paramset_idx": paramset_idx, - "paramset_desc": paramset_desc, - "params": params, - "param_set_hash": dict_to_uuid( - {**params, "clustering_method": clustering_method} - ), - } - param_query = cls & {"param_set_hash": param_dict["param_set_hash"]} - - if param_query: # If the specified param-set already exists - existing_paramset_idx = param_query.fetch1("paramset_idx") - if ( - existing_paramset_idx == paramset_idx - ): # If the existing set has the same paramset_idx: job done - return - else: # If not same name: human error, trying to add the same paramset with different name - raise dj.DataJointError( - f"The specified param-set already exists" - f" - with paramset_idx: {existing_paramset_idx}" - ) - else: - if {"paramset_idx": paramset_idx} in cls.proj(): - raise dj.DataJointError( - f"The specified paramset_idx {paramset_idx} already exists," - f" please pick a different one." - ) - cls.insert1(param_dict) - - -@schema -class ClusterQualityLabel(dj.Lookup): - """Quality label for each spike sorted cluster. - - Attributes: - cluster_quality_label (foreign key, varchar(100) ): Cluster quality type. - cluster_quality_description ( varchar(4000) ): Description of the cluster quality type. - """ - - definition = """ - # Quality - cluster_quality_label: varchar(100) # cluster quality type - e.g. 'good', 'MUA', 'noise', etc. - --- - cluster_quality_description: varchar(4000) - """ - contents = [ - ("good", "single unit"), - ("ok", "probably a single unit, but could be contaminated"), - ("mua", "multi-unit activity"), - ("noise", "bad unit"), - ] - - -@schema -class ClusteringTask(dj.Manual): - """A clustering task to spike sort electrophysiology datasets. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. 
- ClusteringParamSet (foreign key): ClusteringParamSet primary key. - clustering_output_dir ( varchar (255) ): Relative path to output clustering results. - task_mode (enum): `Trigger` computes clustering or and `load` imports existing data. - """ - - definition = """ - # Manual table for defining a clustering task ready to be run - -> EphysRecording - -> ClusteringParamSet - --- - clustering_output_dir='': varchar(255) # clustering output directory relative to the clustering root data directory - task_mode='load': enum('load', 'trigger') # 'load': load computed analysis results, 'trigger': trigger computation - """ - - @classmethod - def infer_output_dir( - cls, key: dict, relative: bool = False, mkdir: bool = False - ) -> pathlib.Path: - """Infer output directory if it is not provided. - - Args: - key (dict): ClusteringTask primary key. - - Returns: - Expected clustering_output_dir based on the following convention: - processed_dir / session_dir / probe_{insertion_number} / {clustering_method}_{paramset_idx} - e.g.: sub4/sess1/probe_2/kilosort2_0 - """ - processed_dir = pathlib.Path(get_processed_root_data_dir()) - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - root_dir = find_root_directory(get_ephys_root_data_dir(), session_dir) - - method = ( - (ClusteringParamSet * ClusteringMethod & key) - .fetch1("clustering_method") - .replace(".", "-") - ) - - output_dir = ( - processed_dir - / session_dir.relative_to(root_dir) - / f'probe_{key["insertion_number"]}' - / f'{method}_{key["paramset_idx"]}' - ) - - if mkdir: - output_dir.mkdir(parents=True, exist_ok=True) - log.info(f"{output_dir} created!") - - return output_dir.relative_to(processed_dir) if relative else output_dir - - @classmethod - def auto_generate_entries(cls, ephys_recording_key: dict, paramset_idx: int = 0): - """Autogenerate entries based on a particular ephys recording. - - Args: - ephys_recording_key (dict): EphysRecording primary key. 
- paramset_idx (int, optional): Parameter index to use for clustering task. Defaults to 0. - """ - key = {**ephys_recording_key, "paramset_idx": paramset_idx} - - processed_dir = get_processed_root_data_dir() - output_dir = ClusteringTask.infer_output_dir(key, relative=False, mkdir=True) - - try: - kilosort.Kilosort( - output_dir - ) # check if the directory is a valid Kilosort output - except FileNotFoundError: - task_mode = "trigger" - else: - task_mode = "load" - - cls.insert1( - { - **key, - "clustering_output_dir": output_dir.relative_to( - processed_dir - ).as_posix(), - "task_mode": task_mode, - } - ) - - -@schema -class Clustering(dj.Imported): - """A processing table to handle each clustering task. - - Attributes: - ClusteringTask (foreign key): ClusteringTask primary key. - clustering_time (datetime): Time when clustering results are generated. - package_version ( varchar(16) ): Package version used for a clustering analysis. - """ - - definition = """ - # Clustering Procedure - -> ClusteringTask - --- - clustering_time: datetime # time of generation of this set of clustering results - package_version='': varchar(16) - """ - - def make(self, key): - """Triggers or imports clustering analysis.""" - task_mode, output_dir = (ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - - if not output_dir: - output_dir = ClusteringTask.infer_output_dir(key, relative=True, mkdir=True) - # update clustering_output_dir - ClusteringTask.update1( - {**key, "clustering_output_dir": output_dir.as_posix()} - ) - - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - if task_mode == "load": - kilosort.Kilosort( - kilosort_dir - ) # check if the directory is a valid Kilosort output - elif task_mode == "trigger": - acq_software, clustering_method, params = ( - ClusteringTask * EphysRecording * ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method", "params") - - if "kilosort" in clustering_method: - from 
element_array_ephys.readers import kilosort_triggering - - # add additional probe-recording and channels details into `params` - params = {**params, **get_recording_channels_details(key)} - params["fs"] = params["sample_rate"] - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX( - spikeglx_meta_filepath.parent - ) - spikeglx_recording.validate_file("ap") - run_CatGT = ( - params.pop("run_CatGT", True) - and "_tcat." not in spikeglx_meta_filepath.stem - ) - - if clustering_method.startswith("pykilosort"): - kilosort_triggering.run_pykilosort( - continuous_file=spikeglx_recording.root_dir - / (spikeglx_recording.root_name + ".ap.bin"), - kilosort_output_directory=kilosort_dir, - channel_ind=params.pop("channel_ind"), - x_coords=params.pop("x_coords"), - y_coords=params.pop("y_coords"), - shank_ind=params.pop("shank_ind"), - connected=params.pop("connected"), - sample_rate=params.pop("sample_rate"), - params=params, - ) - else: - run_kilosort = kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=run_CatGT, - ) - run_kilosort.run_modules() - elif acq_software == "Open Ephys": - oe_probe = get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - if clustering_method.startswith("pykilosort"): - kilosort_triggering.run_pykilosort( - continuous_file=pathlib.Path( - oe_probe.recording_info["recording_files"][0] - ) - / "continuous.dat", - kilosort_output_directory=kilosort_dir, - channel_ind=params.pop("channel_ind"), - x_coords=params.pop("x_coords"), - y_coords=params.pop("y_coords"), - shank_ind=params.pop("shank_ind"), - connected=params.pop("connected"), - sample_rate=params.pop("sample_rate"), - params=params, - ) - else: - run_kilosort = 
kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort.run_modules() - else: - raise NotImplementedError( - f"Automatic triggering of {clustering_method}" - f" clustering analysis is not yet supported" - ) - - else: - raise ValueError(f"Unknown task mode: {task_mode}") - - creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - self.insert1({**key, "clustering_time": creation_time, "package_version": ""}) - - -@schema -class Curation(dj.Manual): - """Curation procedure table. - - Attributes: - Clustering (foreign key): Clustering primary key. - curation_id (foreign key, int): Unique curation ID. - curation_time (datetime): Time when curation results are generated. - curation_output_dir ( varchar(255) ): Output directory of the curated results. - quality_control (bool): If True, this clustering result has undergone quality control. - manual_curation (bool): If True, manual curation has been performed on this clustering result. - curation_note ( varchar(2000) ): Notes about the curation task. - """ - - definition = """ - # Manual curation procedure - -> Clustering - curation_id: int - --- - curation_time: datetime # time of generation of this set of curated clustering results - curation_output_dir: varchar(255) # output directory of the curated results, relative to root data directory - quality_control: bool # has this clustering result undergone quality control? - manual_curation: bool # has manual curation been performed on this clustering result? 
- curation_note='': varchar(2000) - """ - - def create1_from_clustering_task(self, key, curation_note=""): - """ - A function to create a new corresponding "Curation" for a particular - "ClusteringTask" - """ - if key not in Clustering(): - raise ValueError( - f"No corresponding entry in Clustering available" - f" for: {key}; do `Clustering.populate(key)`" - ) - - task_mode, output_dir = (ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - creation_time, is_curated, is_qc = kilosort.extract_clustering_info( - kilosort_dir - ) - # Synthesize curation_id - curation_id = ( - dj.U().aggr(self & key, n="ifnull(max(curation_id)+1,1)").fetch1("n") - ) - self.insert1( - { - **key, - "curation_id": curation_id, - "curation_time": creation_time, - "curation_output_dir": output_dir, - "quality_control": is_qc, - "manual_curation": is_curated, - "curation_note": curation_note, - } - ) - - -@schema -class CuratedClustering(dj.Imported): - """Clustering results after curation. - - Attributes: - Curation (foreign key): Curation primary key. - """ - - definition = """ - # Clustering results of a curation. - -> Curation - """ - - class Unit(dj.Part): - """Single unit properties after clustering and curation. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - unit (foreign key, int): Unique integer identifying a single unit. - probe.ElectrodeConfig.Electrode (dict): probe.ElectrodeConfig.Electrode primary key. - ClusteringQualityLabel (dict): CLusteringQualityLabel primary key. - spike_count (int): Number of spikes in this recording for this unit. - spike_times (longblob): Spike times of this unit, relative to start time of EphysRecording. - spike_sites (longblob): Array of electrode associated with each spike. - spike_depths (longblob): Array of depths associated with each spike, relative to each spike. 
- """ - - definition = """ - # Properties of a given unit from a round of clustering (and curation) - -> master - unit: int - --- - -> probe.ElectrodeConfig.Electrode # electrode with highest waveform amplitude for this unit - -> ClusterQualityLabel - spike_count: int # how many spikes in this recording for this unit - spike_times: longblob # (s) spike times of this unit, relative to the start of the EphysRecording - spike_sites : longblob # array of electrode associated with each spike - spike_depths=null : longblob # (um) array of depths associated with each spike, relative to the (0, 0) of the probe - """ - - def make(self, key): - """Automated population of Unit information.""" - output_dir = (Curation & key).fetch1("curation_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - kilosort_dataset = kilosort.Kilosort(kilosort_dir) - acq_software, sample_rate = (EphysRecording & key).fetch1( - "acq_software", "sampling_rate" - ) - - sample_rate = kilosort_dataset.data["params"].get("sample_rate", sample_rate) - - # ---------- Unit ---------- - # -- Remove 0-spike units - withspike_idx = [ - i - for i, u in enumerate(kilosort_dataset.data["cluster_ids"]) - if (kilosort_dataset.data["spike_clusters"] == u).any() - ] - valid_units = kilosort_dataset.data["cluster_ids"][withspike_idx] - valid_unit_labels = kilosort_dataset.data["cluster_groups"][withspike_idx] - # -- Get channel and electrode-site mapping - channel2electrodes = get_neuropixels_channel2electrode_map(key, acq_software) - - # -- Spike-times -- - # spike_times_sec_adj > spike_times_sec > spike_times - spike_time_key = ( - "spike_times_sec_adj" - if "spike_times_sec_adj" in kilosort_dataset.data - else ( - "spike_times_sec" - if "spike_times_sec" in kilosort_dataset.data - else "spike_times" - ) - ) - spike_times = kilosort_dataset.data[spike_time_key] - kilosort_dataset.extract_spike_depths() - - # -- Spike-sites and Spike-depths -- - spike_sites = np.array( - [ - 
channel2electrodes[s]["electrode"] - for s in kilosort_dataset.data["spike_sites"] - ] - ) - spike_depths = kilosort_dataset.data["spike_depths"] - - # -- Insert unit, label, peak-chn - units = [] - for unit, unit_lbl in zip(valid_units, valid_unit_labels): - if (kilosort_dataset.data["spike_clusters"] == unit).any(): - unit_channel, _ = kilosort_dataset.get_best_channel(unit) - unit_spike_times = ( - spike_times[kilosort_dataset.data["spike_clusters"] == unit] - / sample_rate - ) - spike_count = len(unit_spike_times) - - units.append( - { - "unit": unit, - "cluster_quality_label": unit_lbl, - **channel2electrodes[unit_channel], - "spike_times": unit_spike_times, - "spike_count": spike_count, - "spike_sites": spike_sites[ - kilosort_dataset.data["spike_clusters"] == unit - ], - "spike_depths": ( - spike_depths[ - kilosort_dataset.data["spike_clusters"] == unit - ] - if spike_depths is not None - else None - ), - } - ) - - self.insert1(key) - self.Unit.insert([{**key, **u} for u in units]) - - -@schema -class WaveformSet(dj.Imported): - """A set of spike waveforms for units out of a given CuratedClustering. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - """ - - definition = """ - # A set of spike waveforms for units out of a given CuratedClustering - -> CuratedClustering - """ - - class PeakWaveform(dj.Part): - """Mean waveform across spikes for a given unit. - - Attributes: - WaveformSet (foreign key): WaveformSet primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - peak_electrode_waveform (longblob): Mean waveform for a given unit at its representative electrode. - """ - - definition = """ - # Mean waveform across spikes for a given unit at its representative electrode - -> master - -> CuratedClustering.Unit - --- - peak_electrode_waveform: longblob # (uV) mean waveform for a given unit at its representative electrode - """ - - class Waveform(dj.Part): - """Spike waveforms for a given unit. 
- - Attributes: - WaveformSet (foreign key): WaveformSet primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key. - waveform_mean (longblob): mean waveform across spikes of the unit in microvolts. - waveforms (longblob): waveforms of a sampling of spikes at the given electrode and unit. - """ - - definition = """ - # Spike waveforms and their mean across spikes for the given unit - -> master - -> CuratedClustering.Unit - -> probe.ElectrodeConfig.Electrode - --- - waveform_mean: longblob # (uV) mean waveform across spikes of the given unit - waveforms=null: longblob # (uV) (spike x sample) waveforms of a sampling of spikes at the given electrode for the given unit - """ - - def make(self, key): - """Populates waveform tables.""" - output_dir = (Curation & key).fetch1("curation_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - kilosort_dataset = kilosort.Kilosort(kilosort_dir) - - acq_software, probe_serial_number = ( - EphysRecording * ProbeInsertion & key - ).fetch1("acq_software", "probe") - - # -- Get channel and electrode-site mapping - recording_key = (EphysRecording & key).fetch1("KEY") - channel2electrodes = get_neuropixels_channel2electrode_map( - recording_key, acq_software - ) - - is_qc = (Curation & key).fetch1("quality_control") - - # Get all units - units = { - u["unit"]: u - for u in (CuratedClustering.Unit & key).fetch(as_dict=True, order_by="unit") - } - - if is_qc: - unit_waveforms = np.load( - kilosort_dir / "mean_waveforms.npy" - ) # unit x channel x sample - - def yield_unit_waveforms(): - for unit_no, unit_waveform in zip( - kilosort_dataset.data["cluster_ids"], unit_waveforms - ): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - if unit_no in units: - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], unit_waveform - ): - 
unit_electrode_waveforms.append( - { - **units[unit_no], - **channel2electrodes[channel], - "waveform_mean": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == units[unit_no]["electrode"] - ): - unit_peak_waveform = { - **units[unit_no], - "peak_electrode_waveform": channel_waveform, - } - yield unit_peak_waveform, unit_electrode_waveforms - - else: - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - elif acq_software == "Open Ephys": - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - openephys_dataset = openephys.OpenEphys(session_dir) - neuropixels_recording = openephys_dataset.probes[probe_serial_number] - - def yield_unit_waveforms(): - for unit_dict in units.values(): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - - spikes = unit_dict["spike_times"] - waveforms = neuropixels_recording.extract_spike_waveforms( - spikes, kilosort_dataset.data["channel_map"] - ) # (sample x channel x spike) - waveforms = waveforms.transpose( - (1, 2, 0) - ) # (channel x spike x sample) - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], waveforms - ): - unit_electrode_waveforms.append( - { - **unit_dict, - **channel2electrodes[channel], - "waveform_mean": channel_waveform.mean(axis=0), - "waveforms": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == unit_dict["electrode"] - ): - unit_peak_waveform = { - **unit_dict, - "peak_electrode_waveform": channel_waveform.mean( - axis=0 - ), - } - - yield unit_peak_waveform, unit_electrode_waveforms - - # insert waveform on a per-unit basis to mitigate potential memory issue - self.insert1(key) - for unit_peak_waveform, unit_electrode_waveforms in yield_unit_waveforms(): - if unit_peak_waveform: - self.PeakWaveform.insert1(unit_peak_waveform, ignore_extra_fields=True) - if 
unit_electrode_waveforms: - self.Waveform.insert(unit_electrode_waveforms, ignore_extra_fields=True) - - -@schema -class QualityMetrics(dj.Imported): - """Clustering and waveform quality metrics. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - """ - - definition = """ - # Clusters and waveforms metrics - -> CuratedClustering - """ - - class Cluster(dj.Part): - """Cluster metrics for a unit. - - Attributes: - QualityMetrics (foreign key): QualityMetrics primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - firing_rate (float): Firing rate of the unit. - snr (float): Signal-to-noise ratio for a unit. - presence_ratio (float): Fraction of time where spikes are present. - isi_violation (float): rate of ISI violation as a fraction of overall rate. - number_violation (int): Total ISI violations. - amplitude_cutoff (float): Estimate of miss rate based on amplitude histogram. - isolation_distance (float): Distance to nearest cluster. - l_ratio (float): Amount of empty space between a cluster and other spikes in dataset. - d_prime (float): Classification accuracy based on LDA. - nn_hit_rate (float): Fraction of neighbors for target cluster that are also in target cluster. - nn_miss_rate (float): Fraction of neighbors outside target cluster that are in the target cluster. - silhouette_core (float): Maximum change in spike depth throughout recording. - cumulative_drift (float): Cumulative change in spike depth throughout recording. - contamination_rate (float): Frequency of spikes in the refractory period. 
- """ - - definition = """ - # Cluster metrics for a particular unit - -> master - -> CuratedClustering.Unit - --- - firing_rate=null: float # (Hz) firing rate for a unit - snr=null: float # signal-to-noise ratio for a unit - presence_ratio=null: float # fraction of time in which spikes are present - isi_violation=null: float # rate of ISI violation as a fraction of overall rate - number_violation=null: int # total number of ISI violations - amplitude_cutoff=null: float # estimate of miss rate based on amplitude histogram - isolation_distance=null: float # distance to nearest cluster in Mahalanobis space - l_ratio=null: float # - d_prime=null: float # Classification accuracy based on LDA - nn_hit_rate=null: float # Fraction of neighbors for target cluster that are also in target cluster - nn_miss_rate=null: float # Fraction of neighbors outside target cluster that are in target cluster - silhouette_score=null: float # Standard metric for cluster overlap - max_drift=null: float # Maximum change in spike depth throughout recording - cumulative_drift=null: float # Cumulative change in spike depth throughout recording - contamination_rate=null: float # - """ - - class Waveform(dj.Part): - """Waveform metrics for a particular unit. - - Attributes: - QualityMetrics (foreign key): QualityMetrics primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - amplitude (float): Absolute difference between waveform peak and trough in microvolts. - duration (float): Time between waveform peak and trough in milliseconds. - halfwidth (float): Spike width at half max amplitude. - pt_ratio (float): Absolute amplitude of peak divided by absolute amplitude of trough relative to 0. - repolarization_slope (float): Slope of the regression line fit to first 30 microseconds from trough to peak. - recovery_slope (float): Slope of the regression line fit to first 30 microseconds from peak to tail. 
- spread (float): The range with amplitude over 12-percent of maximum amplitude along the probe. - velocity_above (float): inverse velocity of waveform propagation from soma to the top of the probe. - velocity_below (float): inverse velocity of waveform propagation from soma toward the bottom of the probe. - """ - - definition = """ - # Waveform metrics for a particular unit - -> master - -> CuratedClustering.Unit - --- - amplitude: float # (uV) absolute difference between waveform peak and trough - duration: float # (ms) time between waveform peak and trough - halfwidth=null: float # (ms) spike width at half max amplitude - pt_ratio=null: float # absolute amplitude of peak divided by absolute amplitude of trough relative to 0 - repolarization_slope=null: float # the repolarization slope was defined by fitting a regression line to the first 30us from trough to peak - recovery_slope=null: float # the recovery slope was defined by fitting a regression line to the first 30us from peak to tail - spread=null: float # (um) the range with amplitude above 12-percent of the maximum amplitude along the probe - velocity_above=null: float # (s/m) inverse velocity of waveform propagation from the soma toward the top of the probe - velocity_below=null: float # (s/m) inverse velocity of waveform propagation from the soma toward the bottom of the probe - """ - - def make(self, key): - """Populates tables with quality metrics data.""" - output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - metric_fp = kilosort_dir / "metrics.csv" - rename_dict = { - "isi_viol": "isi_violation", - "num_viol": "number_violation", - "contam_rate": "contamination_rate", - } - - if not metric_fp.exists(): - raise FileNotFoundError(f"QC metrics file not found: {metric_fp}") - - metrics_df = pd.read_csv(metric_fp) - metrics_df.set_index("cluster_id", inplace=True) - metrics_df.replace([np.inf, -np.inf], np.nan, 
inplace=True) - metrics_df.columns = metrics_df.columns.str.lower() - metrics_df.rename(columns=rename_dict, inplace=True) - metrics_list = [ - dict(metrics_df.loc[unit_key["unit"]], **unit_key) - for unit_key in (CuratedClustering.Unit & key).fetch("KEY") - ] - - self.insert1(key) - self.Cluster.insert(metrics_list, ignore_extra_fields=True) - self.Waveform.insert(metrics_list, ignore_extra_fields=True) - - -# ---------------- HELPER FUNCTIONS ---------------- - - -def get_spikeglx_meta_filepath(ephys_recording_key: dict) -> str: - """Get spikeGLX data filepath.""" - # attempt to retrieve from EphysRecording.EphysFile - spikeglx_meta_filepath = pathlib.Path( - ( - EphysRecording.EphysFile - & ephys_recording_key - & 'file_path LIKE "%.ap.meta"' - ).fetch1("file_path") - ) - - try: - spikeglx_meta_filepath = find_full_path( - get_ephys_root_data_dir(), spikeglx_meta_filepath - ) - except FileNotFoundError: - # if not found, search in session_dir again - if not spikeglx_meta_filepath.exists(): - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(ephys_recording_key) - ) - inserted_probe_serial_number = ( - ProbeInsertion * probe.Probe & ephys_recording_key - ).fetch1("probe") - - spikeglx_meta_filepaths = [fp for fp in session_dir.rglob("*.ap.meta")] - for meta_filepath in spikeglx_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - spikeglx_meta_filepath = meta_filepath - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format( - ephys_recording_key - ) - ) - - return spikeglx_meta_filepath - - -def get_openephys_probe_data(ephys_recording_key: dict) -> list: - """Get OpenEphys probe data from file.""" - inserted_probe_serial_number = ( - ProbeInsertion * probe.Probe & ephys_recording_key - ).fetch1("probe") - session_dir = find_full_path( - get_ephys_root_data_dir(), 
get_session_directory(ephys_recording_key) - ) - loaded_oe = openephys.OpenEphys(session_dir) - probe_data = loaded_oe.probes[inserted_probe_serial_number] - - # explicitly garbage collect "loaded_oe" - # as these may have large memory footprint and may not be cleared fast enough - del loaded_oe - gc.collect() - - return probe_data - - -def get_neuropixels_channel2electrode_map( - ephys_recording_key: dict, acq_software: str -) -> dict: - """Get the channel map for neuropixels probe.""" - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key) - spikeglx_meta = spikeglx.SpikeGLXMeta(spikeglx_meta_filepath) - electrode_config_key = ( - EphysRecording * probe.ElectrodeConfig & ephys_recording_key - ).fetch1("KEY") - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & electrode_config_key - ) - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - channel2electrode_map = { - recorded_site: probe_electrodes[(shank, shank_col, shank_row)] - for recorded_site, (shank, shank_col, shank_row, _) in enumerate( - spikeglx_meta.shankmap["data"] - ) - } - elif acq_software == "Open Ephys": - probe_dataset = get_openephys_probe_data(ephys_recording_key) - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode * EphysRecording - & ephys_recording_key - ) - - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - channel2electrode_map = { - channel_idx: probe_electrodes[channel_idx] - for channel_idx in probe_dataset.ap_meta["channels_indices"] - } - - return channel2electrode_map - - -def generate_electrode_config(probe_type: str, electrode_keys: list) -> dict: - """Generate and insert new ElectrodeConfig - - Args: - probe_type (str): probe type (e.g. 
neuropixels 2.0 - SS) - electrode_keys (list): list of keys of the probe.ProbeType.Electrode table - - Returns: - dict: representing a key of the probe.ElectrodeConfig table - """ - # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid({k["electrode"]: k for k in electrode_keys}) - - electrode_list = sorted([k["electrode"] for k in electrode_keys]) - electrode_gaps = ( - [-1] - + np.where(np.diff(electrode_list) > 1)[0].tolist() - + [len(electrode_list) - 1] - ) - electrode_config_name = "; ".join( - [ - f"{electrode_list[start + 1]}-{electrode_list[end]}" - for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) - ] - ) - - electrode_config_key = {"electrode_config_hash": electrode_config_hash} - - # ---- make new ElectrodeConfig if needed ---- - if not probe.ElectrodeConfig & electrode_config_key: - probe.ElectrodeConfig.insert1( - { - **electrode_config_key, - "probe_type": probe_type, - "electrode_config_name": electrode_config_name, - } - ) - probe.ElectrodeConfig.Electrode.insert( - {**electrode_config_key, **electrode} for electrode in electrode_keys - ) - - return electrode_config_key - - -def get_recording_channels_details(ephys_recording_key: dict) -> np.array: - """Get details of recording channels for a given recording.""" - channels_details = {} - - acq_software, sample_rate = (EphysRecording & ephys_recording_key).fetch1( - "acq_software", "sampling_rate" - ) - - probe_type = (ProbeInsertion * probe.Probe & ephys_recording_key).fetch1( - "probe_type" - ) - channels_details["probe_type"] = { - "neuropixels 1.0 - 3A": "3A", - "neuropixels 1.0 - 3B": "NP1", - "neuropixels UHD": "NP1100", - "neuropixels 2.0 - SS": "NP21", - "neuropixels 2.0 - MS": "NP24", - }[probe_type] - - electrode_config_key = ( - probe.ElectrodeConfig * EphysRecording & ephys_recording_key - ).fetch1("KEY") - ( - channels_details["channel_ind"], - channels_details["x_coords"], - 
channels_details["y_coords"], - channels_details["shank_ind"], - ) = ( - probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode - & electrode_config_key - ).fetch( - "electrode", "x_coord", "y_coord", "shank" - ) - channels_details["sample_rate"] = sample_rate - channels_details["num_channels"] = len(channels_details["channel_ind"]) - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - channels_details["uVPerBit"] = spikeglx_recording.get_channel_bit_volts("ap")[0] - channels_details["connected"] = np.array( - [v for *_, v in spikeglx_recording.apmeta.shankmap["data"]] - ) - elif acq_software == "Open Ephys": - oe_probe = get_openephys_probe_data(ephys_recording_key) - channels_details["uVPerBit"] = oe_probe.ap_meta["channels_gains"][0] - channels_details["connected"] = np.array( - [ - int(v == 1) - for c, v in oe_probe.channels_connected.items() - if c in channels_details["channel_ind"] - ] - ) - - return channels_details diff --git a/element_array_ephys/ephys_chronic.py b/element_array_ephys/ephys_chronic.py deleted file mode 100644 index 772e885f..00000000 --- a/element_array_ephys/ephys_chronic.py +++ /dev/null @@ -1,1523 +0,0 @@ -import gc -import importlib -import inspect -import pathlib -from decimal import Decimal - -import datajoint as dj -import numpy as np -import pandas as pd -from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory - -from . import ephys_report, probe -from .readers import kilosort, openephys, spikeglx - -log = dj.logger - -schema = dj.schema() - -_linking_module = None - - -def activate( - ephys_schema_name: str, - probe_schema_name: str = None, - *, - create_schema: bool = True, - create_tables: bool = True, - linking_module: str = None, -): - """Activates the `ephys` and `probe` schemas. 
- - Args: - ephys_schema_name (str): A string containing the name of the ephys schema. - probe_schema_name (str): A string containing the name of the probe schema. - create_schema (bool): If True, schema will be created in the database. - create_tables (bool): If True, tables related to the schema will be created in the database. - linking_module (str): A string containing the module name or module containing the required dependencies to activate the schema. - - Dependencies: - Upstream tables: - Session: A parent table to ProbeInsertion - Probe: A parent table to EphysRecording. Probe information is required before electrophysiology data is imported. - - Functions: - get_ephys_root_data_dir(): Returns absolute path for root data director(y/ies) with all electrophysiological recording sessions, as a list of string(s). - get_session_direction(session_key: dict): Returns path to electrophysiology data for the a particular session as a list of strings. - get_processed_data_dir(): Optional. Returns absolute path for processed data. Defaults to root directory. - """ - - if isinstance(linking_module, str): - linking_module = importlib.import_module(linking_module) - assert inspect.ismodule( - linking_module - ), "The argument 'dependency' must be a module's name or a module" - - global _linking_module - _linking_module = linking_module - - probe.activate( - probe_schema_name, create_schema=create_schema, create_tables=create_tables - ) - schema.activate( - ephys_schema_name, - create_schema=create_schema, - create_tables=create_tables, - add_objects=_linking_module.__dict__, - ) - ephys_report.activate(f"{ephys_schema_name}_report", ephys_schema_name) - - -# -------------- Functions required by the elements-ephys --------------- - - -def get_ephys_root_data_dir() -> list: - """Fetches absolute data path to ephys data directories. - - The absolute path here is used as a reference for all downstream relative paths used in DataJoint. 
- - Returns: - A list of the absolute path(s) to ephys data directories. - """ - root_directories = _linking_module.get_ephys_root_data_dir() - if isinstance(root_directories, (str, pathlib.Path)): - root_directories = [root_directories] - - if hasattr(_linking_module, "get_processed_root_data_dir"): - root_directories.append(_linking_module.get_processed_root_data_dir()) - - return root_directories - - -def get_session_directory(session_key: dict) -> str: - """Retrieve the session directory with Neuropixels for the given session. - - Args: - session_key (dict): A dictionary mapping subject to an entry in the subject table, and session_datetime corresponding to a session in the database. - - Returns: - A string for the path to the session directory. - """ - return _linking_module.get_session_directory(session_key) - - -def get_processed_root_data_dir() -> str: - """Retrieve the root directory for all processed data. - - Returns: - A string for the full path to the root directory for processed data. - """ - - if hasattr(_linking_module, "get_processed_root_data_dir"): - return _linking_module.get_processed_root_data_dir() - else: - return get_ephys_root_data_dir()[0] - - -# ----------------------------- Table declarations ---------------------- - - -@schema -class AcquisitionSoftware(dj.Lookup): - """Name of software used for recording electrophysiological data. - - Attributes: - acq_software ( varchar(24) ): Acquisition software, e.g,. SpikeGLX, OpenEphys - """ - - definition = """ # Software used for recording of neuropixels probes - acq_software: varchar(24) - """ - contents = zip(["SpikeGLX", "Open Ephys"]) - - -@schema -class ProbeInsertion(dj.Manual): - """Information about probe insertion across subjects and sessions. - - Attributes: - Session (foreign key): Session primary key. - insertion_number (foreign key, str): Unique insertion number for each probe insertion for a given session. - probe.Probe (str): probe.Probe primary key. 
- """ - - definition = """ - # Probe insertion chronically implanted into an animal. - -> Subject - insertion_number: tinyint unsigned - --- - -> probe.Probe - insertion_datetime=null: datetime - """ - - -@schema -class InsertionLocation(dj.Manual): - """Stereotaxic location information for each probe insertion. - - Attributes: - ProbeInsertion (foreign key): ProbeInsertion primary key. - SkullReference (dict): SkullReference primary key. - ap_location (decimal (6, 2) ): Anterior-posterior location in micrometers. Reference is 0 with anterior values positive. - ml_location (decimal (6, 2) ): Medial-lateral location in micrometers. Reference is zero with right side values positive. - depth (decimal (6, 2) ): Manipulator depth relative to the surface of the brain at zero. Ventral is negative. - Theta (decimal (5, 2) ): elevation - rotation about the ml-axis in degrees relative to positive z-axis. - phi (decimal (5, 2) ): azimuth - rotation about the dv-axis in degrees relative to the positive x-axis. - """ - - definition = """ - # Brain Location of a given probe insertion. - -> ProbeInsertion - --- - -> SkullReference - ap_location: decimal(6, 2) # (um) anterior-posterior; ref is 0; more anterior is more positive - ml_location: decimal(6, 2) # (um) medial axis; ref is 0 ; more right is more positive - depth: decimal(6, 2) # (um) manipulator depth relative to surface of the brain (0); more ventral is more negative - theta=null: decimal(5, 2) # (deg) - elevation - rotation about the ml-axis [0, 180] - w.r.t the z+ axis - phi=null: decimal(5, 2) # (deg) - azimuth - rotation about the dv-axis [0, 360] - w.r.t the x+ axis - beta=null: decimal(5, 2) # (deg) rotation about the shank of the probe [-180, 180] - clockwise is increasing in degree - 0 is the probe-front facing anterior - """ - - -@schema -class EphysRecording(dj.Imported): - """Automated table with electrophysiology recording information for each probe inserted during an experimental session. 
- - Attributes: - ProbeInsertion (foreign key): ProbeInsertion primary key. - probe.ElectrodeConfig (dict): probe.ElectrodeConfig primary key. - AcquisitionSoftware (dict): AcquisitionSoftware primary key. - sampling_rate (float): sampling rate of the recording in Hertz (Hz). - recording_datetime (datetime): datetime of the recording from this probe. - recording_duration (float): duration of the entire recording from this probe in seconds. - """ - - definition = """ - # Ephys recording from a probe insertion for a given session. - -> Session - -> ProbeInsertion - --- - -> probe.ElectrodeConfig - -> AcquisitionSoftware - sampling_rate: float # (Hz) - recording_datetime: datetime # datetime of the recording from this probe - recording_duration: float # (seconds) duration of the recording from this probe - """ - - class EphysFile(dj.Part): - """Paths of electrophysiology recording files for each insertion. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - file_path (varchar(255) ): relative file path for electrophysiology recording. - """ - - definition = """ - # Paths of files of a given EphysRecording round. - -> master - file_path: varchar(255) # filepath relative to root data directory - """ - - def make(self, key): - """Populates table with electrophysiology recording information.""" - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - - inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1( - "probe" - ) - - # search session dir and determine acquisition software - for ephys_pattern, ephys_acq_type in ( - ("*.ap.meta", "SpikeGLX"), - ("*.oebin", "Open Ephys"), - ): - ephys_meta_filepaths = list(session_dir.rglob(ephys_pattern)) - if ephys_meta_filepaths: - acq_software = ephys_acq_type - break - else: - raise FileNotFoundError( - f"Ephys recording data not found!" 
- f" Neither SpikeGLX nor Open Ephys recording files found" - f" in {session_dir}" - ) - - supported_probe_types = probe.ProbeType.fetch("probe_type") - - if acq_software == "SpikeGLX": - for meta_filepath in ephys_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - f"No SpikeGLX data found for probe insertion: {key}" - + " The probe serial number does not match." - ) - - if spikeglx_meta.probe_model in supported_probe_types: - probe_type = spikeglx_meta.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - electrode_group_members = [ - probe_electrodes[(shank, shank_col, shank_row)] - for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels probe model" - " {} not yet implemented".format(spikeglx_meta.probe_model) - ) - - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": spikeglx_meta.meta["imSampRate"], - "recording_datetime": spikeglx_meta.recording_time, - "recording_duration": ( - spikeglx_meta.recording_duration - or spikeglx.retrieve_recording_duration(meta_filepath) - ), - } - ) - - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) - self.EphysFile.insert1( - {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} - ) - elif acq_software == "Open Ephys": - dataset = openephys.OpenEphys(session_dir) - for serial_number, probe_data in dataset.probes.items(): - if str(serial_number) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No Open Ephys data found for probe insertion: 
{}".format(key) - ) - - if not probe_data.ap_meta: - raise IOError( - 'No analog signals found - check "structure.oebin" file or "continuous" directory' - ) - - if probe_data.probe_model in supported_probe_types: - probe_type = probe_data.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - electrode_group_members = [ - probe_electrodes[channel_idx] - for channel_idx in probe_data.ap_meta["channels_indices"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels" - " probe model {} not yet implemented".format(probe_data.probe_model) - ) - - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": probe_data.ap_meta["sample_rate"], - "recording_datetime": probe_data.recording_info[ - "recording_datetimes" - ][0], - "recording_duration": np.sum( - probe_data.recording_info["recording_durations"] - ), - } - ) - - root_dir = find_root_directory( - get_ephys_root_data_dir(), - probe_data.recording_info["recording_files"][0], - ) - self.EphysFile.insert( - [ - {**key, "file_path": fp.relative_to(root_dir).as_posix()} - for fp in probe_data.recording_info["recording_files"] - ] - ) - # explicitly garbage collect "dataset" - # as these may have large memory footprint and may not be cleared fast enough - del probe_data, dataset - gc.collect() - else: - raise NotImplementedError( - f"Processing ephys files from" - f" acquisition software of type {acq_software} is" - f" not yet implemented" - ) - - -@schema -class LFP(dj.Imported): - """Extracts local field potentials (LFP) from an electrophysiology recording. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - lfp_sampling_rate (float): Sampling rate for LFPs in Hz. - lfp_time_stamps (longblob): Time stamps with respect to the start of the recording. 
- lfp_mean (longblob): Overall mean LFP across electrodes. - """ - - definition = """ - # Acquired local field potential (LFP) from a given Ephys recording. - -> EphysRecording - --- - lfp_sampling_rate: float # (Hz) - lfp_time_stamps: longblob # (s) timestamps with respect to the start of the recording (recording_timestamp) - lfp_mean: longblob # (uV) mean of LFP across electrodes - shape (time,) - """ - - class Electrode(dj.Part): - """Saves local field potential data for each electrode. - - Attributes: - LFP (foreign key): LFP primary key. - probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key. - lfp (longblob): LFP recording at this electrode in microvolts. - """ - - definition = """ - -> master - -> probe.ElectrodeConfig.Electrode - --- - lfp: longblob # (uV) recorded lfp at this electrode - """ - - # Only store LFP for every 9th channel, due to high channel density, - # close-by channels exhibit highly similar LFP - _skip_channel_counts = 9 - - def make(self, key): - """Populates the LFP tables.""" - acq_software = (EphysRecording * ProbeInsertion & key).fetch1("acq_software") - - electrode_keys, lfp = [], [] - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - - lfp_channel_ind = spikeglx_recording.lfmeta.recording_channels[ - -1 :: -self._skip_channel_counts - ] - - # Extract LFP data at specified channels and convert to uV - lfp = spikeglx_recording.lf_timeseries[ - :, lfp_channel_ind - ] # (sample x channel) - lfp = ( - lfp * spikeglx_recording.get_channel_bit_volts("lf")[lfp_channel_ind] - ).T # (channel x sample) - - self.insert1( - dict( - key, - lfp_sampling_rate=spikeglx_recording.lfmeta.meta["imSampRate"], - lfp_time_stamps=( - np.arange(lfp.shape[1]) - / spikeglx_recording.lfmeta.meta["imSampRate"] - ), - lfp_mean=lfp.mean(axis=0), - ) - ) - - electrode_query = ( - probe.ProbeType.Electrode - * 
probe.ElectrodeConfig.Electrode - * EphysRecording - & key - ) - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - for recorded_site in lfp_channel_ind: - shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap[ - "data" - ][recorded_site] - electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)]) - elif acq_software == "Open Ephys": - oe_probe = get_openephys_probe_data(key) - - lfp_channel_ind = np.r_[ - len(oe_probe.lfp_meta["channels_indices"]) - - 1 : 0 : -self._skip_channel_counts - ] - - # (sample x channel) - lfp = oe_probe.lfp_timeseries[:, lfp_channel_ind] - lfp = ( - lfp * np.array(oe_probe.lfp_meta["channels_gains"])[lfp_channel_ind] - ).T # (channel x sample) - lfp_timestamps = oe_probe.lfp_timestamps - - self.insert1( - dict( - key, - lfp_sampling_rate=oe_probe.lfp_meta["sample_rate"], - lfp_time_stamps=lfp_timestamps, - lfp_mean=lfp.mean(axis=0), - ) - ) - - electrode_query = ( - probe.ProbeType.Electrode - * probe.ElectrodeConfig.Electrode - * EphysRecording - & key - ) - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - electrode_keys.extend( - probe_electrodes[channel_idx] for channel_idx in lfp_channel_ind - ) - else: - raise NotImplementedError( - f"LFP extraction from acquisition software" - f" of type {acq_software} is not yet implemented" - ) - - # single insert in loop to mitigate potential memory issue - for electrode_key, lfp_trace in zip(electrode_keys, lfp): - self.Electrode.insert1({**key, **electrode_key, "lfp": lfp_trace}) - - -# ------------ Clustering -------------- - - -@schema -class ClusteringMethod(dj.Lookup): - """Kilosort clustering method. - - Attributes: - clustering_method (foreign key, varchar(16) ): Kilosort clustering method. - clustering_methods_desc (varchar(1000) ): Additional description of the clustering method. 
- """ - - definition = """ - # Method for clustering - clustering_method: varchar(16) - --- - clustering_method_desc: varchar(1000) - """ - - contents = [ - ("kilosort2", "kilosort2 clustering method"), - ("kilosort2.5", "kilosort2.5 clustering method"), - ("kilosort3", "kilosort3 clustering method"), - ] - - -@schema -class ClusteringParamSet(dj.Lookup): - """Parameters to be used in clustering procedure for spike sorting. - - Attributes: - paramset_idx (foreign key): Unique ID for the clustering parameter set. - ClusteringMethod (dict): ClusteringMethod primary key. - paramset_desc (varchar(128) ): Description of the clustering parameter set. - param_set_hash (uuid): UUID hash for the parameter set. - params (longblob): Parameters for clustering with Kilosort. - """ - - definition = """ - # Parameter set to be used in a clustering procedure - paramset_idx: smallint - --- - -> ClusteringMethod - paramset_desc: varchar(128) - param_set_hash: uuid - unique index (param_set_hash) - params: longblob # dictionary of all applicable parameters - """ - - @classmethod - def insert_new_params( - cls, - clustering_method: str, - paramset_desc: str, - params: dict, - paramset_idx: int = None, - ): - """Inserts new parameters into the ClusteringParamSet table. - - Args: - clustering_method (str): name of the clustering method. - paramset_desc (str): description of the parameter set - params (dict): clustering parameters - paramset_idx (int, optional): Unique parameter set ID. Defaults to None. 
- """ - if paramset_idx is None: - paramset_idx = ( - dj.U().aggr(cls, n="max(paramset_idx)").fetch1("n") or 0 - ) + 1 - - param_dict = { - "clustering_method": clustering_method, - "paramset_idx": paramset_idx, - "paramset_desc": paramset_desc, - "params": params, - "param_set_hash": dict_to_uuid( - {**params, "clustering_method": clustering_method} - ), - } - param_query = cls & {"param_set_hash": param_dict["param_set_hash"]} - - if param_query: # If the specified param-set already exists - existing_paramset_idx = param_query.fetch1("paramset_idx") - if ( - existing_paramset_idx == paramset_idx - ): # If the existing set has the same paramset_idx: job done - return - else: # If not same name: human error, trying to add the same paramset with different name - raise dj.DataJointError( - f"The specified param-set already exists" - f" - with paramset_idx: {existing_paramset_idx}" - ) - else: - if {"paramset_idx": paramset_idx} in cls.proj(): - raise dj.DataJointError( - f"The specified paramset_idx {paramset_idx} already exists," - f" please pick a different one." - ) - cls.insert1(param_dict) - - -@schema -class ClusterQualityLabel(dj.Lookup): - """Quality label for each spike sorted cluster. - - Attributes: - cluster_quality_label (foreign key, varchar(100) ): Cluster quality type. - cluster_quality_description (varchar(4000) ): Description of the cluster quality type. - """ - - definition = """ - # Quality - cluster_quality_label: varchar(100) # cluster quality type - e.g. 'good', 'MUA', 'noise', etc. - --- - cluster_quality_description: varchar(4000) - """ - contents = [ - ("good", "single unit"), - ("ok", "probably a single unit, but could be contaminated"), - ("mua", "multi-unit activity"), - ("noise", "bad unit"), - ] - - -@schema -class ClusteringTask(dj.Manual): - """A clustering task to spike sort electrophysiology datasets. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. 
- ClusteringParamSet (foreign key): ClusteringParamSet primary key. - clustering_outdir_dir (varchar (255) ): Relative path to output clustering results. - task_mode (enum): `Trigger` computes clustering or and `load` imports existing data. - """ - - definition = """ - # Manual table for defining a clustering task ready to be run - -> EphysRecording - -> ClusteringParamSet - --- - clustering_output_dir='': varchar(255) # clustering output directory relative to the clustering root data directory - task_mode='load': enum('load', 'trigger') # 'load': load computed analysis results, 'trigger': trigger computation - """ - - @classmethod - def infer_output_dir(cls, key, relative=False, mkdir=False) -> pathlib.Path: - """Infer output directory if it is not provided. - - Args: - key (dict): ClusteringTask primary key. - - Returns: - Expected clustering_output_dir based on the following convention: - processed_dir / session_dir / probe_{insertion_number} / {clustering_method}_{paramset_idx} - e.g.: sub4/sess1/probe_2/kilosort2_0 - """ - processed_dir = pathlib.Path(get_processed_root_data_dir()) - sess_dir = find_full_path(get_ephys_root_data_dir(), get_session_directory(key)) - root_dir = find_root_directory(get_ephys_root_data_dir(), sess_dir) - - method = ( - (ClusteringParamSet * ClusteringMethod & key) - .fetch1("clustering_method") - .replace(".", "-") - ) - - output_dir = ( - processed_dir - / sess_dir.relative_to(root_dir) - / f'probe_{key["insertion_number"]}' - / f'{method}_{key["paramset_idx"]}' - ) - - if mkdir: - output_dir.mkdir(parents=True, exist_ok=True) - log.info(f"{output_dir} created!") - - return output_dir.relative_to(processed_dir) if relative else output_dir - - @classmethod - def auto_generate_entries(cls, ephys_recording_key: dict, paramset_idx: int = 0): - """Autogenerate entries based on a particular ephys recording. - - Args: - ephys_recording_key (dict): EphysRecording primary key. 
- paramset_idx (int, optional): Parameter index to use for clustering task. Defaults to 0. - """ - key = {**ephys_recording_key, "paramset_idx": paramset_idx} - - processed_dir = get_processed_root_data_dir() - output_dir = ClusteringTask.infer_output_dir(key, relative=False, mkdir=True) - - try: - kilosort.Kilosort( - output_dir - ) # check if the directory is a valid Kilosort output - except FileNotFoundError: - task_mode = "trigger" - else: - task_mode = "load" - - cls.insert1( - { - **key, - "clustering_output_dir": output_dir.relative_to( - processed_dir - ).as_posix(), - "task_mode": task_mode, - } - ) - - -@schema -class Clustering(dj.Imported): - """A processing table to handle each clustering task. - - Attributes: - ClusteringTask (foreign key): ClusteringTask primary key. - clustering_time (datetime): Time when clustering results are generated. - package_version (varchar(16) ): Package version used for a clustering analysis. - """ - - definition = """ - # Clustering Procedure - -> ClusteringTask - --- - clustering_time: datetime # time of generation of this set of clustering results - package_version='': varchar(16) - """ - - def make(self, key): - """Triggers or imports clustering analysis.""" - task_mode, output_dir = (ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - - if not output_dir: - output_dir = ClusteringTask.infer_output_dir(key, relative=True, mkdir=True) - # update clustering_output_dir - ClusteringTask.update1( - {**key, "clustering_output_dir": output_dir.as_posix()} - ) - - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - if task_mode == "load": - kilosort.Kilosort( - kilosort_dir - ) # check if the directory is a valid Kilosort output - elif task_mode == "trigger": - acq_software, clustering_method, params = ( - ClusteringTask * EphysRecording * ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method", "params") - - if "kilosort" in clustering_method: - from 
element_array_ephys.readers import kilosort_triggering - - # add additional probe-recording and channels details into `params` - params = {**params, **get_recording_channels_details(key)} - params["fs"] = params["sample_rate"] - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX( - spikeglx_meta_filepath.parent - ) - spikeglx_recording.validate_file("ap") - run_CatGT = ( - params.pop("run_CatGT", True) - and "_tcat." not in spikeglx_meta_filepath.stem - ) - - if clustering_method.startswith("pykilosort"): - kilosort_triggering.run_pykilosort( - continuous_file=spikeglx_recording.root_dir - / (spikeglx_recording.root_name + ".ap.bin"), - kilosort_output_directory=kilosort_dir, - channel_ind=params.pop("channel_ind"), - x_coords=params.pop("x_coords"), - y_coords=params.pop("y_coords"), - shank_ind=params.pop("shank_ind"), - connected=params.pop("connected"), - sample_rate=params.pop("sample_rate"), - params=params, - ) - else: - run_kilosort = kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=run_CatGT, - ) - run_kilosort.run_modules() - elif acq_software == "Open Ephys": - oe_probe = get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - if clustering_method.startswith("pykilosort"): - kilosort_triggering.run_pykilosort( - continuous_file=pathlib.Path( - oe_probe.recording_info["recording_files"][0] - ) - / "continuous.dat", - kilosort_output_directory=kilosort_dir, - channel_ind=params.pop("channel_ind"), - x_coords=params.pop("x_coords"), - y_coords=params.pop("y_coords"), - shank_ind=params.pop("shank_ind"), - connected=params.pop("connected"), - sample_rate=params.pop("sample_rate"), - params=params, - ) - else: - run_kilosort = 
kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort.run_modules() - else: - raise NotImplementedError( - f"Automatic triggering of {clustering_method}" - f" clustering analysis is not yet supported" - ) - - else: - raise ValueError(f"Unknown task mode: {task_mode}") - - creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - self.insert1({**key, "clustering_time": creation_time, "package_version": ""}) - - -@schema -class Curation(dj.Manual): - """Curation procedure table. - - Attributes: - Clustering (foreign key): Clustering primary key. - curation_id (foreign key, int): Unique curation ID. - curation_time (datetime): Time when curation results are generated. - curation_output_dir (varchar(255) ): Output directory of the curated results. - quality_control (bool): If True, this clustering result has undergone quality control. - manual_curation (bool): If True, manual curation has been performed on this clustering result. - curation_note (varchar(2000) ): Notes about the curation task. - """ - - definition = """ - # Manual curation procedure - -> Clustering - curation_id: int - --- - curation_time: datetime # time of generation of this set of curated clustering results - curation_output_dir: varchar(255) # output directory of the curated results, relative to root data directory - quality_control: bool # has this clustering result undergone quality control? - manual_curation: bool # has manual curation been performed on this clustering result? 
- curation_note='': varchar(2000) - """ - - def create1_from_clustering_task(self, key, curation_note: str = ""): - """ - A function to create a new corresponding "Curation" for a particular - "ClusteringTask" - """ - if key not in Clustering(): - raise ValueError( - f"No corresponding entry in Clustering available" - f" for: {key}; do `Clustering.populate(key)`" - ) - - task_mode, output_dir = (ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - creation_time, is_curated, is_qc = kilosort.extract_clustering_info( - kilosort_dir - ) - # Synthesize curation_id - curation_id = ( - dj.U().aggr(self & key, n="ifnull(max(curation_id)+1,1)").fetch1("n") - ) - self.insert1( - { - **key, - "curation_id": curation_id, - "curation_time": creation_time, - "curation_output_dir": output_dir, - "quality_control": is_qc, - "manual_curation": is_curated, - "curation_note": curation_note, - } - ) - - -@schema -class CuratedClustering(dj.Imported): - """Clustering results after curation. - - Attributes: - Curation (foreign key): Curation primary key. - """ - - definition = """ - # Clustering results of a curation. - -> Curation - """ - - class Unit(dj.Part): - """Single unit properties after clustering and curation. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - unit (foreign key, int): Unique integer identifying a single unit. - probe.ElectrodeConfig.Electrode (dict): probe.ElectrodeConfig.Electrode primary key. - ClusteringQualityLabel (dict): CLusteringQualityLabel primary key. - spike_count (int): Number of spikes in this recording for this unit. - spike_times (longblob): Spike times of this unit, relative to start time of EphysRecording. - spike_sites (longblob): Array of electrode associated with each spike. - spike_depths (longblob): Array of depths associated with each spike, relative to each spike. 
- """ - - definition = """ - # Properties of a given unit from a round of clustering (and curation) - -> master - unit: int - --- - -> probe.ElectrodeConfig.Electrode # electrode with highest waveform amplitude for this unit - -> ClusterQualityLabel - spike_count: int # how many spikes in this recording for this unit - spike_times: longblob # (s) spike times of this unit, relative to the start of the EphysRecording - spike_sites : longblob # array of electrode associated with each spike - spike_depths=null : longblob # (um) array of depths associated with each spike, relative to the (0, 0) of the probe - """ - - def make(self, key): - """Automated population of Unit information.""" - output_dir = (Curation & key).fetch1("curation_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - kilosort_dataset = kilosort.Kilosort(kilosort_dir) - acq_software, sample_rate = (EphysRecording & key).fetch1( - "acq_software", "sampling_rate" - ) - - sample_rate = kilosort_dataset.data["params"].get("sample_rate", sample_rate) - - # ---------- Unit ---------- - # -- Remove 0-spike units - withspike_idx = [ - i - for i, u in enumerate(kilosort_dataset.data["cluster_ids"]) - if (kilosort_dataset.data["spike_clusters"] == u).any() - ] - valid_units = kilosort_dataset.data["cluster_ids"][withspike_idx] - valid_unit_labels = kilosort_dataset.data["cluster_groups"][withspike_idx] - # -- Get channel and electrode-site mapping - channel2electrodes = get_neuropixels_channel2electrode_map(key, acq_software) - - # -- Spike-times -- - # spike_times_sec_adj > spike_times_sec > spike_times - spike_time_key = ( - "spike_times_sec_adj" - if "spike_times_sec_adj" in kilosort_dataset.data - else ( - "spike_times_sec" - if "spike_times_sec" in kilosort_dataset.data - else "spike_times" - ) - ) - spike_times = kilosort_dataset.data[spike_time_key] - kilosort_dataset.extract_spike_depths() - - # -- Spike-sites and Spike-depths -- - spike_sites = np.array( - [ - 
channel2electrodes[s]["electrode"] - for s in kilosort_dataset.data["spike_sites"] - ] - ) - spike_depths = kilosort_dataset.data["spike_depths"] - - # -- Insert unit, label, peak-chn - units = [] - for unit, unit_lbl in zip(valid_units, valid_unit_labels): - if (kilosort_dataset.data["spike_clusters"] == unit).any(): - unit_channel, _ = kilosort_dataset.get_best_channel(unit) - unit_spike_times = ( - spike_times[kilosort_dataset.data["spike_clusters"] == unit] - / sample_rate - ) - spike_count = len(unit_spike_times) - - units.append( - { - "unit": unit, - "cluster_quality_label": unit_lbl, - **channel2electrodes[unit_channel], - "spike_times": unit_spike_times, - "spike_count": spike_count, - "spike_sites": spike_sites[ - kilosort_dataset.data["spike_clusters"] == unit - ], - "spike_depths": ( - spike_depths[ - kilosort_dataset.data["spike_clusters"] == unit - ] - if spike_depths is not None - else None - ), - } - ) - - self.insert1(key) - self.Unit.insert([{**key, **u} for u in units]) - - -@schema -class WaveformSet(dj.Imported): - """A set of spike waveforms for units out of a given CuratedClustering. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - """ - - definition = """ - # A set of spike waveforms for units out of a given CuratedClustering - -> CuratedClustering - """ - - class PeakWaveform(dj.Part): - """Mean waveform across spikes for a given unit. - - Attributes: - WaveformSet (foreign key): WaveformSet primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - peak_electrode_waveform (longblob): Mean waveform for a given unit at its representative electrode. - """ - - definition = """ - # Mean waveform across spikes for a given unit at its representative electrode - -> master - -> CuratedClustering.Unit - --- - peak_electrode_waveform: longblob # (uV) mean waveform for a given unit at its representative electrode - """ - - class Waveform(dj.Part): - """Spike waveforms for a given unit. 
- - Attributes: - WaveformSet (foreign key): WaveformSet primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key. - waveform_mean (longblob): mean waveform across spikes of the unit in microvolts. - waveforms (longblob): waveforms of a sampling of spikes at the given electrode and unit. - """ - - definition = """ - # Spike waveforms and their mean across spikes for the given unit - -> master - -> CuratedClustering.Unit - -> probe.ElectrodeConfig.Electrode - --- - waveform_mean: longblob # (uV) mean waveform across spikes of the given unit - waveforms=null: longblob # (uV) (spike x sample) waveforms of a sampling of spikes at the given electrode for the given unit - """ - - def make(self, key): - """Populates waveform tables.""" - output_dir = (Curation & key).fetch1("curation_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - kilosort_dataset = kilosort.Kilosort(kilosort_dir) - - acq_software, probe_serial_number = ( - EphysRecording * ProbeInsertion & key - ).fetch1("acq_software", "probe") - - # -- Get channel and electrode-site mapping - recording_key = (EphysRecording & key).fetch1("KEY") - channel2electrodes = get_neuropixels_channel2electrode_map( - recording_key, acq_software - ) - - is_qc = (Curation & key).fetch1("quality_control") - - # Get all units - units = { - u["unit"]: u - for u in (CuratedClustering.Unit & key).fetch(as_dict=True, order_by="unit") - } - - if is_qc: - unit_waveforms = np.load( - kilosort_dir / "mean_waveforms.npy" - ) # unit x channel x sample - - def yield_unit_waveforms(): - for unit_no, unit_waveform in zip( - kilosort_dataset.data["cluster_ids"], unit_waveforms - ): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - if unit_no in units: - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], unit_waveform - ): - 
unit_electrode_waveforms.append( - { - **units[unit_no], - **channel2electrodes[channel], - "waveform_mean": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == units[unit_no]["electrode"] - ): - unit_peak_waveform = { - **units[unit_no], - "peak_electrode_waveform": channel_waveform, - } - yield unit_peak_waveform, unit_electrode_waveforms - - else: - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - elif acq_software == "Open Ephys": - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - openephys_dataset = openephys.OpenEphys(session_dir) - neuropixels_recording = openephys_dataset.probes[probe_serial_number] - - def yield_unit_waveforms(): - for unit_dict in units.values(): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - - spikes = unit_dict["spike_times"] - waveforms = neuropixels_recording.extract_spike_waveforms( - spikes, kilosort_dataset.data["channel_map"] - ) # (sample x channel x spike) - waveforms = waveforms.transpose( - (1, 2, 0) - ) # (channel x spike x sample) - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], waveforms - ): - unit_electrode_waveforms.append( - { - **unit_dict, - **channel2electrodes[channel], - "waveform_mean": channel_waveform.mean(axis=0), - "waveforms": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == unit_dict["electrode"] - ): - unit_peak_waveform = { - **unit_dict, - "peak_electrode_waveform": channel_waveform.mean( - axis=0 - ), - } - - yield unit_peak_waveform, unit_electrode_waveforms - - # insert waveform on a per-unit basis to mitigate potential memory issue - self.insert1(key) - for unit_peak_waveform, unit_electrode_waveforms in yield_unit_waveforms(): - if unit_peak_waveform: - self.PeakWaveform.insert1(unit_peak_waveform, ignore_extra_fields=True) - if 
unit_electrode_waveforms: - self.Waveform.insert(unit_electrode_waveforms, ignore_extra_fields=True) - - -@schema -class QualityMetrics(dj.Imported): - """Clustering and waveform quality metrics. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - """ - - definition = """ - # Clusters and waveforms metrics - -> CuratedClustering - """ - - class Cluster(dj.Part): - """Cluster metrics for a unit. - - Attributes: - QualityMetrics (foreign key): QualityMetrics primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - firing_rate (float): Firing rate of the unit. - snr (float): Signal-to-noise ratio for a unit. - presence_ratio (float): Fraction of time where spikes are present. - isi_violation (float): rate of ISI violation as a fraction of overall rate. - number_violation (int): Total ISI violations. - amplitude_cutoff (float): Estimate of miss rate based on amplitude histogram. - isolation_distance (float): Distance to nearest cluster. - l_ratio (float): Amount of empty space between a cluster and other spikes in dataset. - d_prime (float): Classification accuracy based on LDA. - nn_hit_rate (float): Fraction of neighbors for target cluster that are also in target cluster. - nn_miss_rate (float): Fraction of neighbors outside target cluster that are in the target cluster. - silhouette_core (float): Maximum change in spike depth throughout recording. - cumulative_drift (float): Cumulative change in spike depth throughout recording. - contamination_rate (float): Frequency of spikes in the refractory period. 
- """ - - definition = """ - # Cluster metrics for a particular unit - -> master - -> CuratedClustering.Unit - --- - firing_rate=null: float # (Hz) firing rate for a unit - snr=null: float # signal-to-noise ratio for a unit - presence_ratio=null: float # fraction of time in which spikes are present - isi_violation=null: float # rate of ISI violation as a fraction of overall rate - number_violation=null: int # total number of ISI violations - amplitude_cutoff=null: float # estimate of miss rate based on amplitude histogram - isolation_distance=null: float # distance to nearest cluster in Mahalanobis space - l_ratio=null: float # - d_prime=null: float # Classification accuracy based on LDA - nn_hit_rate=null: float # Fraction of neighbors for target cluster that are also in target cluster - nn_miss_rate=null: float # Fraction of neighbors outside target cluster that are in target cluster - silhouette_score=null: float # Standard metric for cluster overlap - max_drift=null: float # Maximum change in spike depth throughout recording - cumulative_drift=null: float # Cumulative change in spike depth throughout recording - contamination_rate=null: float # - """ - - class Waveform(dj.Part): - """Waveform metrics for a particular unit. - - Attributes: - QualityMetrics (foreign key): QualityMetrics primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - amplitude (float): Absolute difference between waveform peak and trough in microvolts. - duration (float): Time between waveform peak and trough in milliseconds. - halfwidth (float): Spike width at half max amplitude. - pt_ratio (float): Absolute amplitude of peak divided by absolute amplitude of trough relative to 0. - repolarization_slope (float): Slope of the regression line fit to first 30 microseconds from trough to peak. - recovery_slope (float): Slope of the regression line fit to first 30 microseconds from peak to tail. 
- spread (float): The range with amplitude over 12-percent of maximum amplitude along the probe. - velocity_above (float): inverse velocity of waveform propagation from soma to the top of the probe. - velocity_below (float): inverse velocity of waveform propagation from soma toward the bottom of the probe. - """ - - definition = """ - # Waveform metrics for a particular unit - -> master - -> CuratedClustering.Unit - --- - amplitude: float # (uV) absolute difference between waveform peak and trough - duration: float # (ms) time between waveform peak and trough - halfwidth=null: float # (ms) spike width at half max amplitude - pt_ratio=null: float # absolute amplitude of peak divided by absolute amplitude of trough relative to 0 - repolarization_slope=null: float # the repolarization slope was defined by fitting a regression line to the first 30us from trough to peak - recovery_slope=null: float # the recovery slope was defined by fitting a regression line to the first 30us from peak to tail - spread=null: float # (um) the range with amplitude above 12-percent of the maximum amplitude along the probe - velocity_above=null: float # (s/m) inverse velocity of waveform propagation from the soma toward the top of the probe - velocity_below=null: float # (s/m) inverse velocity of waveform propagation from the soma toward the bottom of the probe - """ - - def make(self, key): - """Populates tables with quality metrics data.""" - output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - metric_fp = kilosort_dir / "metrics.csv" - rename_dict = { - "isi_viol": "isi_violation", - "num_viol": "number_violation", - "contam_rate": "contamination_rate", - } - - if not metric_fp.exists(): - raise FileNotFoundError(f"QC metrics file not found: {metric_fp}") - - metrics_df = pd.read_csv(metric_fp) - metrics_df.set_index("cluster_id", inplace=True) - metrics_df.replace([np.inf, -np.inf], np.nan, 
inplace=True) - metrics_df.columns = metrics_df.columns.str.lower() - metrics_df.rename(columns=rename_dict, inplace=True) - metrics_list = [ - dict(metrics_df.loc[unit_key["unit"]], **unit_key) - for unit_key in (CuratedClustering.Unit & key).fetch("KEY") - ] - - self.insert1(key) - self.Cluster.insert(metrics_list, ignore_extra_fields=True) - self.Waveform.insert(metrics_list, ignore_extra_fields=True) - - -# ---------------- HELPER FUNCTIONS ---------------- - - -def get_spikeglx_meta_filepath(ephys_recording_key: dict) -> str: - """Get spikeGLX data filepath.""" - # attempt to retrieve from EphysRecording.EphysFile - spikeglx_meta_filepath = pathlib.Path( - ( - EphysRecording.EphysFile - & ephys_recording_key - & 'file_path LIKE "%.ap.meta"' - ).fetch1("file_path") - ) - - try: - spikeglx_meta_filepath = find_full_path( - get_ephys_root_data_dir(), spikeglx_meta_filepath - ) - except FileNotFoundError: - # if not found, search in session_dir again - if not spikeglx_meta_filepath.exists(): - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(ephys_recording_key) - ) - inserted_probe_serial_number = ( - ProbeInsertion * probe.Probe & ephys_recording_key - ).fetch1("probe") - - spikeglx_meta_filepaths = [fp for fp in session_dir.rglob("*.ap.meta")] - for meta_filepath in spikeglx_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - spikeglx_meta_filepath = meta_filepath - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format( - ephys_recording_key - ) - ) - - return spikeglx_meta_filepath - - -def get_openephys_probe_data(ephys_recording_key: dict) -> list: - """Get OpenEphys probe data from file.""" - inserted_probe_serial_number = ( - ProbeInsertion * probe.Probe & ephys_recording_key - ).fetch1("probe") - session_dir = find_full_path( - get_ephys_root_data_dir(), 
get_session_directory(ephys_recording_key) - ) - loaded_oe = openephys.OpenEphys(session_dir) - probe_data = loaded_oe.probes[inserted_probe_serial_number] - - # explicitly garbage collect "loaded_oe" - # as these may have large memory footprint and may not be cleared fast enough - del loaded_oe - gc.collect() - - return probe_data - - -def get_neuropixels_channel2electrode_map( - ephys_recording_key: dict, acq_software: str -) -> dict: - """Get the channel map for neuropixels probe.""" - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key) - spikeglx_meta = spikeglx.SpikeGLXMeta(spikeglx_meta_filepath) - electrode_config_key = ( - EphysRecording * probe.ElectrodeConfig & ephys_recording_key - ).fetch1("KEY") - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & electrode_config_key - ) - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - channel2electrode_map = { - recorded_site: probe_electrodes[(shank, shank_col, shank_row)] - for recorded_site, (shank, shank_col, shank_row, _) in enumerate( - spikeglx_meta.shankmap["data"] - ) - } - elif acq_software == "Open Ephys": - probe_dataset = get_openephys_probe_data(ephys_recording_key) - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode * EphysRecording - & ephys_recording_key - ) - - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - channel2electrode_map = { - channel_idx: probe_electrodes[channel_idx] - for channel_idx in probe_dataset.ap_meta["channels_indices"] - } - - return channel2electrode_map - - -def generate_electrode_config(probe_type: str, electrode_keys: list) -> dict: - """Generate and insert new ElectrodeConfig - - Args: - probe_type (str): probe type (e.g. 
neuropixels 2.0 - SS) - electrode_keys (list): list of keys of the probe.ProbeType.Electrode table - - Returns: - dict: representing a key of the probe.ElectrodeConfig table - """ - # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid({k["electrode"]: k for k in electrode_keys}) - - electrode_list = sorted([k["electrode"] for k in electrode_keys]) - electrode_gaps = ( - [-1] - + np.where(np.diff(electrode_list) > 1)[0].tolist() - + [len(electrode_list) - 1] - ) - electrode_config_name = "; ".join( - [ - f"{electrode_list[start + 1]}-{electrode_list[end]}" - for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) - ] - ) - - electrode_config_key = {"electrode_config_hash": electrode_config_hash} - - # ---- make new ElectrodeConfig if needed ---- - if not probe.ElectrodeConfig & electrode_config_key: - probe.ElectrodeConfig.insert1( - { - **electrode_config_key, - "probe_type": probe_type, - "electrode_config_name": electrode_config_name, - } - ) - probe.ElectrodeConfig.Electrode.insert( - {**electrode_config_key, **electrode} for electrode in electrode_keys - ) - - return electrode_config_key - - -def get_recording_channels_details(ephys_recording_key: dict) -> np.array: - """Get details of recording channels for a given recording.""" - channels_details = {} - - acq_software, sample_rate = (EphysRecording & ephys_recording_key).fetch1( - "acq_software", "sampling_rate" - ) - - probe_type = (ProbeInsertion * probe.Probe & ephys_recording_key).fetch1( - "probe_type" - ) - channels_details["probe_type"] = { - "neuropixels 1.0 - 3A": "3A", - "neuropixels 1.0 - 3B": "NP1", - "neuropixels UHD": "NP1100", - "neuropixels 2.0 - SS": "NP21", - "neuropixels 2.0 - MS": "NP24", - }[probe_type] - - electrode_config_key = ( - probe.ElectrodeConfig * EphysRecording & ephys_recording_key - ).fetch1("KEY") - ( - channels_details["channel_ind"], - channels_details["x_coords"], - 
channels_details["y_coords"], - channels_details["shank_ind"], - ) = ( - probe.ElectrodeConfig.Electrode * probe.ProbeType.Electrode - & electrode_config_key - ).fetch( - "electrode", "x_coord", "y_coord", "shank" - ) - channels_details["sample_rate"] = sample_rate - channels_details["num_channels"] = len(channels_details["channel_ind"]) - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - channels_details["uVPerBit"] = spikeglx_recording.get_channel_bit_volts("ap")[0] - channels_details["connected"] = np.array( - [v for *_, v in spikeglx_recording.apmeta.shankmap["data"]] - ) - elif acq_software == "Open Ephys": - oe_probe = get_openephys_probe_data(ephys_recording_key) - channels_details["uVPerBit"] = oe_probe.ap_meta["channels_gains"][0] - channels_details["connected"] = np.array( - [ - int(v == 1) - for c, v in oe_probe.channels_connected.items() - if c in channels_details["channel_ind"] - ] - ) - - return channels_details diff --git a/element_array_ephys/ephys_precluster.py b/element_array_ephys/ephys_precluster.py deleted file mode 100644 index 4d52c610..00000000 --- a/element_array_ephys/ephys_precluster.py +++ /dev/null @@ -1,1435 +0,0 @@ -import importlib -import inspect -import re - -import datajoint as dj -import numpy as np -import pandas as pd -from element_interface.utils import dict_to_uuid, find_full_path, find_root_directory - -from . import ephys_report, probe -from .readers import kilosort, openephys, spikeglx - -schema = dj.schema() - -_linking_module = None - - -def activate( - ephys_schema_name: str, - probe_schema_name: str = None, - *, - create_schema: bool = True, - create_tables: bool = True, - linking_module: str = None, -): - """Activates the `ephys` and `probe` schemas. - - Args: - ephys_schema_name (str): A string containing the name of the ephys schema. 
- probe_schema_name (str): A string containing the name of the probe schema. - create_schema (bool): If True, schema will be created in the database. - create_tables (bool): If True, tables related to the schema will be created in the database. - linking_module (str): A string containing the module name or module containing the required dependencies to activate the schema. - - Dependencies: - Upstream tables: - Session: A parent table to ProbeInsertion - Probe: A parent table to EphysRecording. Probe information is required before electrophysiology data is imported. - - Functions: - get_ephys_root_data_dir(): Returns absolute path for root data director(y/ies) with all electrophysiological recording sessions, as a list of string(s). - get_session_direction(session_key: dict): Returns path to electrophysiology data for the a particular session as a list of strings. - """ - - if isinstance(linking_module, str): - linking_module = importlib.import_module(linking_module) - assert inspect.ismodule( - linking_module - ), "The argument 'dependency' must be a module's name or a module" - - global _linking_module - _linking_module = linking_module - - probe.activate( - probe_schema_name, create_schema=create_schema, create_tables=create_tables - ) - schema.activate( - ephys_schema_name, - create_schema=create_schema, - create_tables=create_tables, - add_objects=_linking_module.__dict__, - ) - ephys_report.activate(f"{ephys_schema_name}_report", ephys_schema_name) - - -# -------------- Functions required by the elements-ephys --------------- - - -def get_ephys_root_data_dir() -> list: - """Fetches absolute data path to ephys data directories. - - The absolute path here is used as a reference for all downstream relative paths used in DataJoint. - - Returns: - A list of the absolute path(s) to ephys data directories. 
- """ - return _linking_module.get_ephys_root_data_dir() - - -def get_session_directory(session_key: dict) -> str: - """Retrieve the session directory with Neuropixels for the given session. - - Args: - session_key (dict): A dictionary mapping subject to an entry in the subject table, and session_datetime corresponding to a session in the database. - - Returns: - A string for the path to the session directory. - """ - return _linking_module.get_session_directory(session_key) - - -# ----------------------------- Table declarations ---------------------- - - -@schema -class AcquisitionSoftware(dj.Lookup): - """Name of software used for recording electrophysiological data. - - Attributes: - acq_software ( varchar(24) ): Acquisition software, e.g,. SpikeGLX, OpenEphys - """ - - definition = """ # Name of software used for recording of neuropixels probes - SpikeGLX or Open Ephys - acq_software: varchar(24) - """ - contents = zip(["SpikeGLX", "Open Ephys"]) - - -@schema -class ProbeInsertion(dj.Manual): - """Information about probe insertion across subjects and sessions. - - Attributes: - Session (foreign key): Session primary key. - insertion_number (foreign key, str): Unique insertion number for each probe insertion for a given session. - probe.Probe (str): probe.Probe primary key. - """ - - definition = """ - # Probe insertion implanted into an animal for a given session. - -> Session - insertion_number: tinyint unsigned - --- - -> probe.Probe - """ - - -@schema -class InsertionLocation(dj.Manual): - """Stereotaxic location information for each probe insertion. - - Attributes: - ProbeInsertion (foreign key): ProbeInsertion primary key. - SkullReference (dict): SkullReference primary key. - ap_location (decimal (6, 2) ): Anterior-posterior location in micrometers. Reference is 0 with anterior values positive. - ml_location (decimal (6, 2) ): Medial-lateral location in micrometers. Reference is zero with right side values positive. 
- depth (decimal (6, 2) ): Manipulator depth relative to the surface of the brain at zero. Ventral is negative. - Theta (decimal (5, 2) ): elevation - rotation about the ml-axis in degrees relative to positive z-axis. - phi (decimal (5, 2) ): azimuth - rotation about the dv-axis in degrees relative to the positive x-axis - - """ - - definition = """ - # Brain Location of a given probe insertion. - -> ProbeInsertion - --- - -> SkullReference - ap_location: decimal(6, 2) # (um) anterior-posterior; ref is 0; more anterior is more positive - ml_location: decimal(6, 2) # (um) medial axis; ref is 0 ; more right is more positive - depth: decimal(6, 2) # (um) manipulator depth relative to surface of the brain (0); more ventral is more negative - theta=null: decimal(5, 2) # (deg) - elevation - rotation about the ml-axis [0, 180] - w.r.t the z+ axis - phi=null: decimal(5, 2) # (deg) - azimuth - rotation about the dv-axis [0, 360] - w.r.t the x+ axis - beta=null: decimal(5, 2) # (deg) rotation about the shank of the probe [-180, 180] - clockwise is increasing in degree - 0 is the probe-front facing anterior - """ - - -@schema -class EphysRecording(dj.Imported): - """Automated table with electrophysiology recording information for each probe inserted during an experimental session. - - Attributes: - ProbeInsertion (foreign key): ProbeInsertion primary key. - probe.ElectrodeConfig (dict): probe.ElectrodeConfig primary key. - AcquisitionSoftware (dict): AcquisitionSoftware primary key. - sampling_rate (float): sampling rate of the recording in Hertz (Hz). - recording_datetime (datetime): datetime of the recording from this probe. - recording_duration (float): duration of the entire recording from this probe in seconds. - """ - - definition = """ - # Ephys recording from a probe insertion for a given session. 
- -> ProbeInsertion - --- - -> probe.ElectrodeConfig - -> AcquisitionSoftware - sampling_rate: float # (Hz) - recording_datetime: datetime # datetime of the recording from this probe - recording_duration: float # (seconds) duration of the recording from this probe - """ - - class EphysFile(dj.Part): - """Paths of electrophysiology recording files for each insertion. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - file_path (varchar(255) ): relative file path for electrophysiology recording. - """ - - definition = """ - # Paths of files of a given EphysRecording round. - -> master - file_path: varchar(255) # filepath relative to root data directory - """ - - def make(self, key): - """Populates table with electrophysiology recording information.""" - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - - inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1( - "probe" - ) - - # search session dir and determine acquisition software - for ephys_pattern, ephys_acq_type in ( - ("*.ap.meta", "SpikeGLX"), - ("*.oebin", "Open Ephys"), - ): - ephys_meta_filepaths = [fp for fp in session_dir.rglob(ephys_pattern)] - if ephys_meta_filepaths: - acq_software = ephys_acq_type - break - else: - raise FileNotFoundError( - f"Ephys recording data not found!" 
- f" Neither SpikeGLX nor Open Ephys recording files found" - f" in {session_dir}" - ) - - if acq_software == "SpikeGLX": - for meta_filepath in ephys_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format(key) - ) - - if re.search("(1.0|2.0)", spikeglx_meta.probe_model): - probe_type = spikeglx_meta.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - electrode_group_members = [ - probe_electrodes[(shank, shank_col, shank_row)] - for shank, shank_col, shank_row, _ in spikeglx_meta.shankmap["data"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels probe model" - " {} not yet implemented".format(spikeglx_meta.probe_model) - ) - - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": spikeglx_meta.meta["imSampRate"], - "recording_datetime": spikeglx_meta.recording_time, - "recording_duration": ( - spikeglx_meta.recording_duration - or spikeglx.retrieve_recording_duration(meta_filepath) - ), - } - ) - - root_dir = find_root_directory(get_ephys_root_data_dir(), meta_filepath) - self.EphysFile.insert1( - {**key, "file_path": meta_filepath.relative_to(root_dir).as_posix()} - ) - elif acq_software == "Open Ephys": - dataset = openephys.OpenEphys(session_dir) - for serial_number, probe_data in dataset.probes.items(): - if str(serial_number) == inserted_probe_serial_number: - break - else: - raise FileNotFoundError( - "No Open Ephys data found for probe insertion: {}".format(key) - ) - - if re.search("(1.0|2.0)", probe_data.probe_model): - probe_type = 
probe_data.probe_model - electrode_query = probe.ProbeType.Electrode & {"probe_type": probe_type} - - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - electrode_group_members = [ - probe_electrodes[channel_idx] - for channel_idx in probe_data.ap_meta["channels_ids"] - ] - else: - raise NotImplementedError( - "Processing for neuropixels" - " probe model {} not yet implemented".format(probe_data.probe_model) - ) - - self.insert1( - { - **key, - **generate_electrode_config(probe_type, electrode_group_members), - "acq_software": acq_software, - "sampling_rate": probe_data.ap_meta["sample_rate"], - "recording_datetime": probe_data.recording_info[ - "recording_datetimes" - ][0], - "recording_duration": np.sum( - probe_data.recording_info["recording_durations"] - ), - } - ) - - root_dir = find_root_directory( - get_ephys_root_data_dir(), - probe_data.recording_info["recording_files"][0], - ) - self.EphysFile.insert( - [ - {**key, "file_path": fp.relative_to(root_dir).as_posix()} - for fp in probe_data.recording_info["recording_files"] - ] - ) - else: - raise NotImplementedError( - f"Processing ephys files from" - f" acquisition software of type {acq_software} is" - f" not yet implemented" - ) - - -@schema -class PreClusterMethod(dj.Lookup): - """Pre-clustering method - - Attributes: - precluster_method (foreign key, varchar(16) ): Pre-clustering method for the dataset. - precluster_method_desc(varchar(1000) ): Pre-clustering method description. - """ - - definition = """ - # Method for pre-clustering - precluster_method: varchar(16) - --- - precluster_method_desc: varchar(1000) - """ - - contents = [("catgt", "Time shift, Common average referencing, Zeroing")] - - -@schema -class PreClusterParamSet(dj.Lookup): - """Parameters for the pre-clustering method. - - Attributes: - paramset_idx (foreign key): Unique parameter set ID. - PreClusterMethod (dict): PreClusterMethod query for this dataset. 
- paramset_desc (varchar(128) ): Description for the pre-clustering parameter set. - param_set_hash (uuid): Unique hash for parameter set. - params (longblob): All parameters for the pre-clustering method. - """ - - definition = """ - # Parameter set to be used in a clustering procedure - paramset_idx: smallint - --- - -> PreClusterMethod - paramset_desc: varchar(128) - param_set_hash: uuid - unique index (param_set_hash) - params: longblob # dictionary of all applicable parameters - """ - - @classmethod - def insert_new_params( - cls, precluster_method: str, paramset_idx: int, paramset_desc: str, params: dict - ): - param_dict = { - "precluster_method": precluster_method, - "paramset_idx": paramset_idx, - "paramset_desc": paramset_desc, - "params": params, - "param_set_hash": dict_to_uuid(params), - } - param_query = cls & {"param_set_hash": param_dict["param_set_hash"]} - - if param_query: # If the specified param-set already exists - existing_paramset_idx = param_query.fetch1("paramset_idx") - if ( - existing_paramset_idx == paramset_idx - ): # If the existing set has the same paramset_idx: job done - return - else: # If not same name: human error, trying to add the same paramset with different name - raise dj.DataJointError( - "The specified param-set" - " already exists - paramset_idx: {}".format(existing_paramset_idx) - ) - else: - cls.insert1(param_dict) - - -@schema -class PreClusterParamSteps(dj.Manual): - """Ordered list of parameter sets that will be run. - - Attributes: - precluster_param_steps_id (foreign key): Unique ID for the pre-clustering parameter sets to be run. - precluster_param_steps_name (varchar(32) ): User-friendly name for the parameter steps. - precluster_param_steps_desc (varchar(128) ): Description of the parameter steps. 
- """ - - definition = """ - # Ordered list of paramset_idx that are to be run - # When pre-clustering is not performed, do not create an entry in `Step` Part table - precluster_param_steps_id: smallint - --- - precluster_param_steps_name: varchar(32) - precluster_param_steps_desc: varchar(128) - """ - - class Step(dj.Part): - """Define the order of operations for parameter sets. - - Attributes: - PreClusterParamSteps (foreign key): PreClusterParamSteps primary key. - step_number (foreign key, smallint): Order of operations. - PreClusterParamSet (dict): PreClusterParamSet to be used in pre-clustering. - """ - - definition = """ - -> master - step_number: smallint # Order of operations - --- - -> PreClusterParamSet - """ - - -@schema -class PreClusterTask(dj.Manual): - """Defines a pre-clustering task ready to be run. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - PreclusterParamSteps (foreign key): PreClusterParam Steps primary key. - precluster_output_dir (varchar(255) ): relative path to directory for storing results of pre-clustering. - task_mode (enum ): `none` (no pre-clustering), `load` results from file, or `trigger` automated pre-clustering. - """ - - definition = """ - # Manual table for defining a clustering task ready to be run - -> EphysRecording - -> PreClusterParamSteps - --- - precluster_output_dir='': varchar(255) # pre-clustering output directory relative to the root data directory - task_mode='none': enum('none','load', 'trigger') # 'none': no pre-clustering analysis - # 'load': load analysis results - # 'trigger': trigger computation - """ - - -@schema -class PreCluster(dj.Imported): - """ - A processing table to handle each PreClusterTask: - - Attributes: - PreClusterTask (foreign key): PreClusterTask primary key. - precluster_time (datetime): Time of generation of this set of pre-clustering results. - package_version (varchar(16) ): Package version used for performing pre-clustering. 
- """ - - definition = """ - -> PreClusterTask - --- - precluster_time: datetime # time of generation of this set of pre-clustering results - package_version='': varchar(16) - """ - - def make(self, key): - """Populate pre-clustering tables.""" - task_mode, output_dir = (PreClusterTask & key).fetch1( - "task_mode", "precluster_output_dir" - ) - precluster_output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - if task_mode == "none": - if len((PreClusterParamSteps.Step & key).fetch()) > 0: - raise ValueError( - "There are entries in the PreClusterParamSteps.Step " - "table and task_mode=none" - ) - creation_time = (EphysRecording & key).fetch1("recording_datetime") - elif task_mode == "load": - acq_software = (EphysRecording & key).fetch1("acq_software") - inserted_probe_serial_number = (ProbeInsertion * probe.Probe & key).fetch1( - "probe" - ) - - if acq_software == "SpikeGLX": - for meta_filepath in precluster_output_dir.rglob("*.ap.meta"): - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - creation_time = spikeglx_meta.recording_time - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format(key) - ) - else: - raise NotImplementedError( - f"Pre-clustering analysis of {acq_software}" "is not yet supported." - ) - elif task_mode == "trigger": - raise NotImplementedError( - "Automatic triggering of" - " pre-clustering analysis is not yet supported." - ) - else: - raise ValueError(f"Unknown task mode: {task_mode}") - - self.insert1({**key, "precluster_time": creation_time, "package_version": ""}) - - -@schema -class LFP(dj.Imported): - """Extracts local field potentials (LFP) from an electrophysiology recording. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - lfp_sampling_rate (float): Sampling rate for LFPs in Hz. - lfp_time_stamps (longblob): Time stamps with respect to the start of the recording. 
- lfp_mean (longblob): Overall mean LFP across electrodes. - """ - - definition = """ - # Acquired local field potential (LFP) from a given Ephys recording. - -> PreCluster - --- - lfp_sampling_rate: float # (Hz) - lfp_time_stamps: longblob # (s) timestamps with respect to the start of the recording (recording_timestamp) - lfp_mean: longblob # (uV) mean of LFP across electrodes - shape (time,) - """ - - class Electrode(dj.Part): - """Saves local field potential data for each electrode. - - Attributes: - LFP (foreign key): LFP primary key. - probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key. - lfp (longblob): LFP recording at this electrode in microvolts. - """ - - definition = """ - -> master - -> probe.ElectrodeConfig.Electrode - --- - lfp: longblob # (uV) recorded lfp at this electrode - """ - - # Only store LFP for every 9th channel, due to high channel density, - # close-by channels exhibit highly similar LFP - _skip_channel_counts = 9 - - def make(self, key): - """Populates the LFP tables.""" - acq_software, probe_sn = (EphysRecording * ProbeInsertion & key).fetch1( - "acq_software", "probe" - ) - - electrode_keys, lfp = [], [] - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - - lfp_channel_ind = spikeglx_recording.lfmeta.recording_channels[ - -1 :: -self._skip_channel_counts - ] - - # Extract LFP data at specified channels and convert to uV - lfp = spikeglx_recording.lf_timeseries[ - :, lfp_channel_ind - ] # (sample x channel) - lfp = ( - lfp * spikeglx_recording.get_channel_bit_volts("lf")[lfp_channel_ind] - ).T # (channel x sample) - - self.insert1( - dict( - key, - lfp_sampling_rate=spikeglx_recording.lfmeta.meta["imSampRate"], - lfp_time_stamps=( - np.arange(lfp.shape[1]) - / spikeglx_recording.lfmeta.meta["imSampRate"] - ), - lfp_mean=lfp.mean(axis=0), - ) - ) - - electrode_query = ( - 
probe.ProbeType.Electrode - * probe.ElectrodeConfig.Electrode - * EphysRecording - & key - ) - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - for recorded_site in lfp_channel_ind: - shank, shank_col, shank_row, _ = spikeglx_recording.apmeta.shankmap[ - "data" - ][recorded_site] - electrode_keys.append(probe_electrodes[(shank, shank_col, shank_row)]) - elif acq_software == "Open Ephys": - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - - loaded_oe = openephys.OpenEphys(session_dir) - oe_probe = loaded_oe.probes[probe_sn] - - lfp_channel_ind = np.arange(len(oe_probe.lfp_meta["channels_ids"]))[ - -1 :: -self._skip_channel_counts - ] - - lfp = oe_probe.lfp_timeseries[:, lfp_channel_ind] # (sample x channel) - lfp = ( - lfp * np.array(oe_probe.lfp_meta["channels_gains"])[lfp_channel_ind] - ).T # (channel x sample) - lfp_timestamps = oe_probe.lfp_timestamps - - self.insert1( - dict( - key, - lfp_sampling_rate=oe_probe.lfp_meta["sample_rate"], - lfp_time_stamps=lfp_timestamps, - lfp_mean=lfp.mean(axis=0), - ) - ) - - electrode_query = ( - probe.ProbeType.Electrode - * probe.ElectrodeConfig.Electrode - * EphysRecording - & key - ) - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - for channel_idx in np.array(oe_probe.lfp_meta["channels_ids"])[ - lfp_channel_ind - ]: - electrode_keys.append(probe_electrodes[channel_idx]) - else: - raise NotImplementedError( - f"LFP extraction from acquisition software" - f" of type {acq_software} is not yet implemented" - ) - - # single insert in loop to mitigate potential memory issue - for electrode_key, lfp_trace in zip(electrode_keys, lfp): - self.Electrode.insert1({**key, **electrode_key, "lfp": lfp_trace}) - - -# ------------ Clustering -------------- - - -@schema -class ClusteringMethod(dj.Lookup): - """Kilosort 
clustering method. - - Attributes: - clustering_method (foreign key, varchar(16) ): Kilosort clustering method. - clustering_methods_desc (varchar(1000) ): Additional description of the clustering method. - """ - - definition = """ - # Method for clustering - clustering_method: varchar(16) - --- - clustering_method_desc: varchar(1000) - """ - - contents = [ - ("kilosort", "kilosort clustering method"), - ("kilosort2", "kilosort2 clustering method"), - ] - - -@schema -class ClusteringParamSet(dj.Lookup): - """Parameters to be used in clustering procedure for spike sorting. - - Attributes: - paramset_idx (foreign key): Unique ID for the clustering parameter set. - ClusteringMethod (dict): ClusteringMethod primary key. - paramset_desc (varchar(128) ): Description of the clustering parameter set. - param_set_hash (uuid): UUID hash for the parameter set. - params (longblob): Paramset, dictionary of all applicable parameters. - """ - - definition = """ - # Parameter set to be used in a clustering procedure - paramset_idx: smallint - --- - -> ClusteringMethod - paramset_desc: varchar(128) - param_set_hash: uuid - unique index (param_set_hash) - params: longblob # dictionary of all applicable parameters - """ - - @classmethod - def insert_new_params( - cls, processing_method: str, paramset_idx: int, paramset_desc: str, params: dict - ): - """Inserts new parameters into the ClusteringParamSet table. - - Args: - processing_method (str): name of the clustering method. - paramset_desc (str): description of the parameter set - params (dict): clustering parameters - paramset_idx (int, optional): Unique parameter set ID. Defaults to None. 
- """ - param_dict = { - "clustering_method": processing_method, - "paramset_idx": paramset_idx, - "paramset_desc": paramset_desc, - "params": params, - "param_set_hash": dict_to_uuid(params), - } - param_query = cls & {"param_set_hash": param_dict["param_set_hash"]} - - if param_query: # If the specified param-set already exists - existing_paramset_idx = param_query.fetch1("paramset_idx") - if ( - existing_paramset_idx == paramset_idx - ): # If the existing set has the same paramset_idx: job done - return - else: # If not same name: human error, trying to add the same paramset with different name - raise dj.DataJointError( - "The specified param-set" - " already exists - paramset_idx: {}".format(existing_paramset_idx) - ) - else: - cls.insert1(param_dict) - - -@schema -class ClusterQualityLabel(dj.Lookup): - """Quality label for each spike sorted cluster. - - Attributes: - cluster_quality_label (foreign key, varchar(100) ): Cluster quality type. - cluster_quality_description (varchar(4000) ): Description of the cluster quality type. - """ - - definition = """ - # Quality - cluster_quality_label: varchar(100) - --- - cluster_quality_description: varchar(4000) - """ - contents = [ - ("good", "single unit"), - ("ok", "probably a single unit, but could be contaminated"), - ("mua", "multi-unit activity"), - ("noise", "bad unit"), - ] - - -@schema -class ClusteringTask(dj.Manual): - """A clustering task to spike sort electrophysiology datasets. - - Attributes: - EphysRecording (foreign key): EphysRecording primary key. - ClusteringParamSet (foreign key): ClusteringParamSet primary key. - clustering_outdir_dir (varchar (255) ): Relative path to output clustering results. - task_mode (enum): `Trigger` computes clustering or and `load` imports existing data. 
- """ - - definition = """ - # Manual table for defining a clustering task ready to be run - -> PreCluster - -> ClusteringParamSet - --- - clustering_output_dir: varchar(255) # clustering output directory relative to the clustering root data directory - task_mode='load': enum('load', 'trigger') # 'load': load computed analysis results, 'trigger': trigger computation - """ - - -@schema -class Clustering(dj.Imported): - """A processing table to handle each clustering task. - - Attributes: - ClusteringTask (foreign key): ClusteringTask primary key. - clustering_time (datetime): Time when clustering results are generated. - package_version (varchar(16) ): Package version used for a clustering analysis. - """ - - definition = """ - # Clustering Procedure - -> ClusteringTask - --- - clustering_time: datetime # time of generation of this set of clustering results - package_version='': varchar(16) - """ - - def make(self, key): - """Triggers or imports clustering analysis.""" - task_mode, output_dir = (ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - if task_mode == "load": - _ = kilosort.Kilosort( - kilosort_dir - ) # check if the directory is a valid Kilosort output - creation_time, _, _ = kilosort.extract_clustering_info(kilosort_dir) - elif task_mode == "trigger": - raise NotImplementedError( - "Automatic triggering of" " clustering analysis is not yet supported" - ) - else: - raise ValueError(f"Unknown task mode: {task_mode}") - - self.insert1({**key, "clustering_time": creation_time, "package_version": ""}) - - -@schema -class Curation(dj.Manual): - """Curation procedure table. - - Attributes: - Clustering (foreign key): Clustering primary key. - curation_id (foreign key, int): Unique curation ID. - curation_time (datetime): Time when curation results are generated. - curation_output_dir (varchar(255) ): Output directory of the curated results. 
- quality_control (bool): If True, this clustering result has undergone quality control. - manual_curation (bool): If True, manual curation has been performed on this clustering result. - curation_note (varchar(2000) ): Notes about the curation task. - """ - - definition = """ - # Manual curation procedure - -> Clustering - curation_id: int - --- - curation_time: datetime # time of generation of this set of curated clustering results - curation_output_dir: varchar(255) # output directory of the curated results, relative to root data directory - quality_control: bool # has this clustering result undergone quality control? - manual_curation: bool # has manual curation been performed on this clustering result? - curation_note='': varchar(2000) - """ - - def create1_from_clustering_task(self, key, curation_note: str = ""): - """ - A function to create a new corresponding "Curation" for a particular - "ClusteringTask" - """ - if key not in Clustering(): - raise ValueError( - f"No corresponding entry in Clustering available" - f" for: {key}; do `Clustering.populate(key)`" - ) - - task_mode, output_dir = (ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - creation_time, is_curated, is_qc = kilosort.extract_clustering_info( - kilosort_dir - ) - # Synthesize curation_id - curation_id = ( - dj.U().aggr(self & key, n="ifnull(max(curation_id)+1,1)").fetch1("n") - ) - self.insert1( - { - **key, - "curation_id": curation_id, - "curation_time": creation_time, - "curation_output_dir": output_dir, - "quality_control": is_qc, - "manual_curation": is_curated, - "curation_note": curation_note, - } - ) - - -@schema -class CuratedClustering(dj.Imported): - """Clustering results after curation. - - Attributes: - Curation (foreign key): Curation primary key. - """ - - definition = """ - # Clustering results of a curation. 
- -> Curation - """ - - class Unit(dj.Part): - """Single unit properties after clustering and curation. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - unit (foreign key, int): Unique integer identifying a single unit. - probe.ElectrodeConfig.Electrode (dict): probe.ElectrodeConfig.Electrode primary key. - ClusteringQualityLabel (dict): CLusteringQualityLabel primary key. - spike_count (int): Number of spikes in this recording for this unit. - spike_times (longblob): Spike times of this unit, relative to start time of EphysRecording. - spike_sites (longblob): Array of electrode associated with each spike. - spike_depths (longblob): Array of depths associated with each spike, relative to each spike. - """ - - definition = """ - # Properties of a given unit from a round of clustering (and curation) - -> master - unit: int - --- - -> probe.ElectrodeConfig.Electrode # electrode with highest waveform amplitude for this unit - -> ClusterQualityLabel - spike_count: int # how many spikes in this recording for this unit - spike_times: longblob # (s) spike times of this unit, relative to the start of the EphysRecording - spike_sites : longblob # array of electrode associated with each spike - spike_depths=null : longblob # (um) array of depths associated with each spike, relative to the (0, 0) of the probe - """ - - def make(self, key): - """Automated population of Unit information.""" - output_dir = (Curation & key).fetch1("curation_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - kilosort_dataset = kilosort.Kilosort(kilosort_dir) - acq_software = (EphysRecording & key).fetch1("acq_software") - - # ---------- Unit ---------- - # -- Remove 0-spike units - withspike_idx = [ - i - for i, u in enumerate(kilosort_dataset.data["cluster_ids"]) - if (kilosort_dataset.data["spike_clusters"] == u).any() - ] - valid_units = kilosort_dataset.data["cluster_ids"][withspike_idx] - valid_unit_labels = 
kilosort_dataset.data["cluster_groups"][withspike_idx] - # -- Get channel and electrode-site mapping - channel2electrodes = get_neuropixels_channel2electrode_map(key, acq_software) - - # -- Spike-times -- - # spike_times_sec_adj > spike_times_sec > spike_times - spike_time_key = ( - "spike_times_sec_adj" - if "spike_times_sec_adj" in kilosort_dataset.data - else ( - "spike_times_sec" - if "spike_times_sec" in kilosort_dataset.data - else "spike_times" - ) - ) - spike_times = kilosort_dataset.data[spike_time_key] - kilosort_dataset.extract_spike_depths() - - # -- Spike-sites and Spike-depths -- - spike_sites = np.array( - [ - channel2electrodes[s]["electrode"] - for s in kilosort_dataset.data["spike_sites"] - ] - ) - spike_depths = kilosort_dataset.data["spike_depths"] - - # -- Insert unit, label, peak-chn - units = [] - for unit, unit_lbl in zip(valid_units, valid_unit_labels): - if (kilosort_dataset.data["spike_clusters"] == unit).any(): - unit_channel, _ = kilosort_dataset.get_best_channel(unit) - unit_spike_times = ( - spike_times[kilosort_dataset.data["spike_clusters"] == unit] - / kilosort_dataset.data["params"]["sample_rate"] - ) - spike_count = len(unit_spike_times) - - units.append( - { - "unit": unit, - "cluster_quality_label": unit_lbl, - **channel2electrodes[unit_channel], - "spike_times": unit_spike_times, - "spike_count": spike_count, - "spike_sites": spike_sites[ - kilosort_dataset.data["spike_clusters"] == unit - ], - "spike_depths": ( - spike_depths[ - kilosort_dataset.data["spike_clusters"] == unit - ] - if spike_depths is not None - else None - ), - } - ) - - self.insert1(key) - self.Unit.insert([{**key, **u} for u in units]) - - -@schema -class WaveformSet(dj.Imported): - """A set of spike waveforms for units out of a given CuratedClustering. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. 
- """ - - definition = """ - # A set of spike waveforms for units out of a given CuratedClustering - -> CuratedClustering - """ - - class PeakWaveform(dj.Part): - """Mean waveform across spikes for a given unit. - - Attributes: - WaveformSet (foreign key): WaveformSet primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - peak_electrode_waveform (longblob): Mean waveform for a given unit at its representative electrode. - """ - - definition = """ - # Mean waveform across spikes for a given unit at its representative electrode - -> master - -> CuratedClustering.Unit - --- - peak_electrode_waveform: longblob # (uV) mean waveform for a given unit at its representative electrode - """ - - class Waveform(dj.Part): - """Spike waveforms for a given unit. - - Attributes: - WaveformSet (foreign key): WaveformSet primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - probe.ElectrodeConfig.Electrode (foreign key): probe.ElectrodeConfig.Electrode primary key. - waveform_mean (longblob): mean waveform across spikes of the unit in microvolts. - waveforms (longblob): waveforms of a sampling of spikes at the given electrode and unit. 
- """ - - definition = """ - # Spike waveforms and their mean across spikes for the given unit - -> master - -> CuratedClustering.Unit - -> probe.ElectrodeConfig.Electrode - --- - waveform_mean: longblob # (uV) mean waveform across spikes of the given unit - waveforms=null: longblob # (uV) (spike x sample) waveforms of a sampling of spikes at the given electrode for the given unit - """ - - def make(self, key): - """Populates waveform tables.""" - output_dir = (Curation & key).fetch1("curation_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - kilosort_dataset = kilosort.Kilosort(kilosort_dir) - - acq_software, probe_serial_number = ( - EphysRecording * ProbeInsertion & key - ).fetch1("acq_software", "probe") - - # -- Get channel and electrode-site mapping - recording_key = (EphysRecording & key).fetch1("KEY") - channel2electrodes = get_neuropixels_channel2electrode_map( - recording_key, acq_software - ) - - is_qc = (Curation & key).fetch1("quality_control") - - # Get all units - units = { - u["unit"]: u - for u in (CuratedClustering.Unit & key).fetch(as_dict=True, order_by="unit") - } - - if is_qc: - unit_waveforms = np.load( - kilosort_dir / "mean_waveforms.npy" - ) # unit x channel x sample - - def yield_unit_waveforms(): - for unit_no, unit_waveform in zip( - kilosort_dataset.data["cluster_ids"], unit_waveforms - ): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - if unit_no in units: - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], unit_waveform - ): - unit_electrode_waveforms.append( - { - **units[unit_no], - **channel2electrodes[channel], - "waveform_mean": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == units[unit_no]["electrode"] - ): - unit_peak_waveform = { - **units[unit_no], - "peak_electrode_waveform": channel_waveform, - } - yield unit_peak_waveform, unit_electrode_waveforms - - else: - if acq_software == "SpikeGLX": - 
spikeglx_meta_filepath = get_spikeglx_meta_filepath(key) - neuropixels_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - elif acq_software == "Open Ephys": - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(key) - ) - openephys_dataset = openephys.OpenEphys(session_dir) - neuropixels_recording = openephys_dataset.probes[probe_serial_number] - - def yield_unit_waveforms(): - for unit_dict in units.values(): - unit_peak_waveform = {} - unit_electrode_waveforms = [] - - spikes = unit_dict["spike_times"] - waveforms = neuropixels_recording.extract_spike_waveforms( - spikes, kilosort_dataset.data["channel_map"] - ) # (sample x channel x spike) - waveforms = waveforms.transpose( - (1, 2, 0) - ) # (channel x spike x sample) - for channel, channel_waveform in zip( - kilosort_dataset.data["channel_map"], waveforms - ): - unit_electrode_waveforms.append( - { - **unit_dict, - **channel2electrodes[channel], - "waveform_mean": channel_waveform.mean(axis=0), - "waveforms": channel_waveform, - } - ) - if ( - channel2electrodes[channel]["electrode"] - == unit_dict["electrode"] - ): - unit_peak_waveform = { - **unit_dict, - "peak_electrode_waveform": channel_waveform.mean( - axis=0 - ), - } - - yield unit_peak_waveform, unit_electrode_waveforms - - # insert waveform on a per-unit basis to mitigate potential memory issue - self.insert1(key) - for unit_peak_waveform, unit_electrode_waveforms in yield_unit_waveforms(): - self.PeakWaveform.insert1(unit_peak_waveform, ignore_extra_fields=True) - self.Waveform.insert(unit_electrode_waveforms, ignore_extra_fields=True) - - -@schema -class QualityMetrics(dj.Imported): - """Clustering and waveform quality metrics. - - Attributes: - CuratedClustering (foreign key): CuratedClustering primary key. - """ - - definition = """ - # Clusters and waveforms metrics - -> CuratedClustering - """ - - class Cluster(dj.Part): - """Cluster metrics for a unit. 
- - Attributes: - QualityMetrics (foreign key): QualityMetrics primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - firing_rate (float): Firing rate of the unit. - snr (float): Signal-to-noise ratio for a unit. - presence_ratio (float): Fraction of time where spikes are present. - isi_violation (float): rate of ISI violation as a fraction of overall rate. - number_violation (int): Total ISI violations. - amplitude_cutoff (float): Estimate of miss rate based on amplitude histogram. - isolation_distance (float): Distance to nearest cluster. - l_ratio (float): Amount of empty space between a cluster and other spikes in dataset. - d_prime (float): Classification accuracy based on LDA. - nn_hit_rate (float): Fraction of neighbors for target cluster that are also in target cluster. - nn_miss_rate (float): Fraction of neighbors outside target cluster that are in the target cluster. - silhouette_core (float): Maximum change in spike depth throughout recording. - cumulative_drift (float): Cumulative change in spike depth throughout recording. - contamination_rate (float): Frequency of spikes in the refractory period. 
- """ - - definition = """ - # Cluster metrics for a particular unit - -> master - -> CuratedClustering.Unit - --- - firing_rate=null: float # (Hz) firing rate for a unit - snr=null: float # signal-to-noise ratio for a unit - presence_ratio=null: float # fraction of time in which spikes are present - isi_violation=null: float # rate of ISI violation as a fraction of overall rate - number_violation=null: int # total number of ISI violations - amplitude_cutoff=null: float # estimate of miss rate based on amplitude histogram - isolation_distance=null: float # distance to nearest cluster in Mahalanobis space - l_ratio=null: float # - d_prime=null: float # Classification accuracy based on LDA - nn_hit_rate=null: float # Fraction of neighbors for target cluster that are also in target cluster - nn_miss_rate=null: float # Fraction of neighbors outside target cluster that are in target cluster - silhouette_score=null: float # Standard metric for cluster overlap - max_drift=null: float # Maximum change in spike depth throughout recording - cumulative_drift=null: float # Cumulative change in spike depth throughout recording - contamination_rate=null: float # - """ - - class Waveform(dj.Part): - """Waveform metrics for a particular unit. - - Attributes: - QualityMetrics (foreign key): QualityMetrics primary key. - CuratedClustering.Unit (foreign key): CuratedClustering.Unit primary key. - amplitude (float): Absolute difference between waveform peak and trough in microvolts. - duration (float): Time between waveform peak and trough in milliseconds. - halfwidth (float): Spike width at half max amplitude. - pt_ratio (float): Absolute amplitude of peak divided by absolute amplitude of trough relative to 0. - repolarization_slope (float): Slope of the regression line fit to first 30 microseconds from trough to peak. - recovery_slope (float): Slope of the regression line fit to first 30 microseconds from peak to tail. 
- spread (float): The range with amplitude over 12-percent of maximum amplitude along the probe. - velocity_above (float): inverse velocity of waveform propagation from soma to the top of the probe. - velocity_below (float): inverse velocity of waveform propagation from soma toward the bottom of the probe. - """ - - definition = """ - # Waveform metrics for a particular unit - -> master - -> CuratedClustering.Unit - --- - amplitude: float # (uV) absolute difference between waveform peak and trough - duration: float # (ms) time between waveform peak and trough - halfwidth=null: float # (ms) spike width at half max amplitude - pt_ratio=null: float # absolute amplitude of peak divided by absolute amplitude of trough relative to 0 - repolarization_slope=null: float # the repolarization slope was defined by fitting a regression line to the first 30us from trough to peak - recovery_slope=null: float # the recovery slope was defined by fitting a regression line to the first 30us from peak to tail - spread=null: float # (um) the range with amplitude above 12-percent of the maximum amplitude along the probe - velocity_above=null: float # (s/m) inverse velocity of waveform propagation from the soma toward the top of the probe - velocity_below=null: float # (s/m) inverse velocity of waveform propagation from the soma toward the bottom of the probe - """ - - def make(self, key): - """Populates tables with quality metrics data.""" - output_dir = (ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(get_ephys_root_data_dir(), output_dir) - - metric_fp = kilosort_dir / "metrics.csv" - rename_dict = { - "isi_viol": "isi_violation", - "num_viol": "number_violation", - "contam_rate": "contamination_rate", - } - - if not metric_fp.exists(): - raise FileNotFoundError(f"QC metrics file not found: {metric_fp}") - - metrics_df = pd.read_csv(metric_fp) - metrics_df.set_index("cluster_id", inplace=True) - metrics_df.replace([np.inf, -np.inf], np.nan, 
inplace=True) - metrics_df.columns = metrics_df.columns.str.lower() - metrics_df.rename(columns=rename_dict, inplace=True) - metrics_list = [ - dict(metrics_df.loc[unit_key["unit"]], **unit_key) - for unit_key in (CuratedClustering.Unit & key).fetch("KEY") - ] - - self.insert1(key) - self.Cluster.insert(metrics_list, ignore_extra_fields=True) - self.Waveform.insert(metrics_list, ignore_extra_fields=True) - - -# ---------------- HELPER FUNCTIONS ---------------- - - -def get_spikeglx_meta_filepath(ephys_recording_key: dict) -> str: - """Get spikeGLX data filepath.""" - # attempt to retrieve from EphysRecording.EphysFile - spikeglx_meta_filepath = ( - EphysRecording.EphysFile & ephys_recording_key & 'file_path LIKE "%.ap.meta"' - ).fetch1("file_path") - - try: - spikeglx_meta_filepath = find_full_path( - get_ephys_root_data_dir(), spikeglx_meta_filepath - ) - except FileNotFoundError: - # if not found, search in session_dir again - if not spikeglx_meta_filepath.exists(): - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(ephys_recording_key) - ) - inserted_probe_serial_number = ( - ProbeInsertion * probe.Probe & ephys_recording_key - ).fetch1("probe") - - spikeglx_meta_filepaths = [fp for fp in session_dir.rglob("*.ap.meta")] - for meta_filepath in spikeglx_meta_filepaths: - spikeglx_meta = spikeglx.SpikeGLXMeta(meta_filepath) - if str(spikeglx_meta.probe_SN) == inserted_probe_serial_number: - spikeglx_meta_filepath = meta_filepath - break - else: - raise FileNotFoundError( - "No SpikeGLX data found for probe insertion: {}".format( - ephys_recording_key - ) - ) - - return spikeglx_meta_filepath - - -def get_neuropixels_channel2electrode_map( - ephys_recording_key: dict, acq_software: str -) -> dict: - """Get the channel map for neuropixels probe.""" - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = get_spikeglx_meta_filepath(ephys_recording_key) - spikeglx_meta = spikeglx.SpikeGLXMeta(spikeglx_meta_filepath) - 
electrode_config_key = ( - EphysRecording * probe.ElectrodeConfig & ephys_recording_key - ).fetch1("KEY") - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode - & electrode_config_key - ) - - probe_electrodes = { - (shank, shank_col, shank_row): key - for key, shank, shank_col, shank_row in zip( - *electrode_query.fetch("KEY", "shank", "shank_col", "shank_row") - ) - } - - channel2electrode_map = { - recorded_site: probe_electrodes[(shank, shank_col, shank_row)] - for recorded_site, (shank, shank_col, shank_row, _) in enumerate( - spikeglx_meta.shankmap["data"] - ) - } - elif acq_software == "Open Ephys": - session_dir = find_full_path( - get_ephys_root_data_dir(), get_session_directory(ephys_recording_key) - ) - openephys_dataset = openephys.OpenEphys(session_dir) - probe_serial_number = (ProbeInsertion & ephys_recording_key).fetch1("probe") - probe_dataset = openephys_dataset.probes[probe_serial_number] - - electrode_query = ( - probe.ProbeType.Electrode * probe.ElectrodeConfig.Electrode * EphysRecording - & ephys_recording_key - ) - - probe_electrodes = { - key["electrode"]: key for key in electrode_query.fetch("KEY") - } - - channel2electrode_map = { - channel_idx: probe_electrodes[channel_idx] - for channel_idx in probe_dataset.ap_meta["channels_ids"] - } - - return channel2electrode_map - - -def generate_electrode_config(probe_type: str, electrode_keys: list) -> dict: - """Generate and insert new ElectrodeConfig - - Args: - probe_type (str): probe type (e.g. 
neuropixels 2.0 - SS) - electrode_keys (list): list of keys of the probe.ProbeType.Electrode table - - Returns: - dict: representing a key of the probe.ElectrodeConfig table - """ - # compute hash for the electrode config (hash of dict of all ElectrodeConfig.Electrode) - electrode_config_hash = dict_to_uuid({k["electrode"]: k for k in electrode_keys}) - - electrode_list = sorted([k["electrode"] for k in electrode_keys]) - electrode_gaps = ( - [-1] - + np.where(np.diff(electrode_list) > 1)[0].tolist() - + [len(electrode_list) - 1] - ) - electrode_config_name = "; ".join( - [ - f"{electrode_list[start + 1]}-{electrode_list[end]}" - for start, end in zip(electrode_gaps[:-1], electrode_gaps[1:]) - ] - ) - - electrode_config_key = {"electrode_config_hash": electrode_config_hash} - - # ---- make new ElectrodeConfig if needed ---- - if not probe.ElectrodeConfig & electrode_config_key: - probe.ElectrodeConfig.insert1( - { - **electrode_config_key, - "probe_type": probe_type, - "electrode_config_name": electrode_config_name, - } - ) - probe.ElectrodeConfig.Electrode.insert( - {**electrode_config_key, **electrode} for electrode in electrode_keys - ) - - return electrode_config_key diff --git a/element_array_ephys/ephys_report.py b/element_array_ephys/ephys_report.py index 48bcf613..c962d33d 100644 --- a/element_array_ephys/ephys_report.py +++ b/element_array_ephys/ephys_report.py @@ -7,26 +7,24 @@ import datajoint as dj from element_interface.utils import dict_to_uuid -from . import probe +from . import probe, ephys schema = dj.schema() -ephys = None - -def activate(schema_name, ephys_schema_name, *, create_schema=True, create_tables=True): +def activate(schema_name, *, create_schema=True, create_tables=True): """Activate the current schema. Args: schema_name (str): schema name on the database server to activate the `ephys_report` schema. - ephys_schema_name (str): schema name of the activated ephys element for which - this ephys_report schema will be downstream from. 
create_schema (bool, optional): If True (default), create schema in the database if it does not yet exist. create_tables (bool, optional): If True (default), create tables in the database if they do not yet exist. """ + if not probe.schema.is_activated(): + raise RuntimeError("Please activate the `probe` schema first.") + if not ephys.schema.is_activated(): + raise RuntimeError("Please activate the `ephys` schema first.") - global ephys - ephys = dj.create_virtual_module("ephys", ephys_schema_name) schema.activate( schema_name, create_schema=create_schema, diff --git a/element_array_ephys/export/nwb/nwb.py b/element_array_ephys/export/nwb/nwb.py index a45eb754..8d7da8f5 100644 --- a/element_array_ephys/export/nwb/nwb.py +++ b/element_array_ephys/export/nwb/nwb.py @@ -17,14 +17,7 @@ from spikeinterface import extractors from tqdm import tqdm -from ... import ephys_no_curation as ephys -from ... import probe - -ephys_mode = os.getenv("EPHYS_MODE", dj.config["custom"].get("ephys_mode", "acute")) -if ephys_mode != "no-curation": - raise NotImplementedError( - "This export function is designed for the no_curation " + "schema" - ) +from ... import probe, ephys class DecimalEncoder(json.JSONEncoder): diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 550ae4a1..547fd8ce 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -1,5 +1,7 @@ """ -The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the "spikeinterface" pipeline. Spikeinterface was developed by Alessio Buccino, Samuel Garcia, Cole Hurwitz, Jeremy Magland, and Matthias Hennig (https://github.com/SpikeInterface) +The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the "spikeinterface" pipeline. 
+Spikeinterface was developed by Alessio Buccino, Samuel Garcia, Cole Hurwitz, Jeremy Magland, and Matthias Hennig (https://github.com/SpikeInterface) +If you use this pipeline, please cite SpikeInterface and the relevant sorter(s) used in your publication (see https://github.com/SpikeInterface for additional details for citation). """ from datetime import datetime @@ -7,7 +9,7 @@ import datajoint as dj import pandas as pd import spikeinterface as si -from element_array_ephys import probe, readers +from element_array_ephys import probe, ephys, readers from element_interface.utils import find_full_path, memoized_result from spikeinterface import exporters, extractors, sorters @@ -17,25 +19,25 @@ schema = dj.schema() -ephys = None - def activate( schema_name, *, - ephys_module, create_schema=True, create_tables=True, ): + """Activate the current schema. + + Args: + schema_name (str): schema name on the database server to activate the `si_spike_sorting` schema. + create_schema (bool, optional): If True (default), create schema in the database if it does not yet exist. + create_tables (bool, optional): If True (default), create tables in the database if they do not yet exist. """ - activate(schema_name, *, create_schema=True, create_tables=True, activated_ephys=None) - :param schema_name: schema name on the database server to activate the `spike_sorting` schema - :param ephys_module: the activated ephys element for which this `spike_sorting` schema will be downstream from - :param create_schema: when True (default), create schema in the database if it does not yet exist. - :param create_tables: when True (default), create tables in the database if they do not yet exist. 
- """ - global ephys - ephys = ephys_module + if not probe.schema.is_activated(): + raise RuntimeError("Please activate the `probe` schema first.") + if not ephys.schema.is_activated(): + raise RuntimeError("Please activate the `ephys` schema first.") + schema.activate( schema_name, create_schema=create_schema, diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 2e6de55a..19ba4c76 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.4.0" +__version__ = "1.0.0" diff --git a/tests/tutorial_pipeline.py b/tests/tutorial_pipeline.py index 74b27ddc..1b27027d 100644 --- a/tests/tutorial_pipeline.py +++ b/tests/tutorial_pipeline.py @@ -3,7 +3,7 @@ import datajoint as dj from element_animal import subject from element_animal.subject import Subject -from element_array_ephys import probe, ephys_no_curation as ephys, ephys_report +from element_array_ephys import probe, ephys, ephys_report from element_lab import lab from element_lab.lab import Lab, Location, Project, Protocol, Source, User from element_lab.lab import Device as Equipment @@ -62,7 +62,9 @@ def get_session_directory(session_key): return pathlib.Path(session_directory) -ephys.activate(db_prefix + "ephys", db_prefix + "probe", linking_module=__name__) +probe.activate(db_prefix + "probe") +ephys.activate(db_prefix + "ephys", linking_module=__name__) +ephys_report.activate(db_prefix + "ephys_report") probe.create_neuropixels_probe_types() From 0eef1cbaec2494b7dec7a5af2e8d9d62986280cb Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 10 Sep 2024 15:07:34 -0500 Subject: [PATCH 144/204] rearrange: remove the `ecephys_spike_sorting` flow --- element_array_ephys/ephys.py | 2 +- .../spike_sorting/ecephys_spike_sorting.py | 317 ------------------ .../kilosort_triggering.py | 0 3 files changed, 1 insertion(+), 318 deletions(-) delete mode 100644 element_array_ephys/spike_sorting/ecephys_spike_sorting.py 
rename element_array_ephys/{readers => spike_sorting}/kilosort_triggering.py (100%) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index 3025d289..f17527c1 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -897,7 +897,7 @@ def make(self, key): ).fetch1("acq_software", "clustering_method", "params") if "kilosort" in clustering_method: - from element_array_ephys.readers import kilosort_triggering + from .spike_sorting import kilosort_triggering # add additional probe-recording and channels details into `params` params = {**params, **get_recording_channels_details(key)} diff --git a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py b/element_array_ephys/spike_sorting/ecephys_spike_sorting.py deleted file mode 100644 index 3a43c384..00000000 --- a/element_array_ephys/spike_sorting/ecephys_spike_sorting.py +++ /dev/null @@ -1,317 +0,0 @@ -""" -The following DataJoint pipeline implements the sequence of steps in the spike-sorting routine featured in the -"ecephys_spike_sorting" pipeline. -The "ecephys_spike_sorting" was originally developed by the Allen Institute (https://github.com/AllenInstitute/ecephys_spike_sorting) for Neuropixels data acquired with Open Ephys acquisition system. -Then forked by Jennifer Colonell from the Janelia Research Campus (https://github.com/jenniferColonell/ecephys_spike_sorting) to support SpikeGLX acquisition system. - -At DataJoint, we fork from Jennifer's fork and implemented a version that supports both Open Ephys and Spike GLX. -https://github.com/datajoint-company/ecephys_spike_sorting - -The follow pipeline features three tables: -1. KilosortPreProcessing - for preprocessing steps (no GPU required) - - median_subtraction for Open Ephys - - or the CatGT step for SpikeGLX -2. KilosortClustering - kilosort (MATLAB) - requires GPU - - supports kilosort 2.0, 2.5 or 3.0 (https://github.com/MouseLand/Kilosort.git) -3. 
KilosortPostProcessing - for postprocessing steps (no GPU required) - - kilosort_postprocessing - - noise_templates - - mean_waveforms - - quality_metrics -""" - - -import datajoint as dj -from decimal import Decimal -import json -from datetime import datetime, timedelta - -from element_interface.utils import find_full_path -from element_array_ephys.readers import ( - spikeglx, - kilosort_triggering, -) - -log = dj.logger - -schema = dj.schema() - -ephys = None - -_supported_kilosort_versions = [ - "kilosort2", - "kilosort2.5", - "kilosort3", -] - - -def activate( - schema_name, - *, - ephys_module, - create_schema=True, - create_tables=True, -): - """ - activate(schema_name, *, create_schema=True, create_tables=True, activated_ephys=None) - :param schema_name: schema name on the database server to activate the `spike_sorting` schema - :param ephys_module: the activated ephys element for which this `spike_sorting` schema will be downstream from - :param create_schema: when True (default), create schema in the database if it does not yet exist. - :param create_tables: when True (default), create tables in the database if they do not yet exist. 
- """ - global ephys - ephys = ephys_module - schema.activate( - schema_name, - create_schema=create_schema, - create_tables=create_tables, - add_objects=ephys.__dict__, - ) - - -@schema -class KilosortPreProcessing(dj.Imported): - """A processing table to handle each clustering task.""" - - definition = """ - -> ephys.ClusteringTask - --- - params: longblob # finalized parameterset for this run - execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration - """ - - @property - def key_source(self): - return ( - ephys.ClusteringTask * ephys.ClusteringParamSet - & {"task_mode": "trigger"} - & 'clustering_method in ("kilosort2", "kilosort2.5", "kilosort3")' - ) - ephys.Clustering - - def make(self, key): - """Triggers or imports clustering analysis.""" - execution_time = datetime.utcnow() - - task_mode, output_dir = (ephys.ClusteringTask & key).fetch1( - "task_mode", "clustering_output_dir" - ) - - assert task_mode == "trigger", 'Supporting "trigger" task_mode only' - - if not output_dir: - output_dir = ephys.ClusteringTask.infer_output_dir( - key, relative=True, mkdir=True - ) - # update clustering_output_dir - ephys.ClusteringTask.update1( - {**key, "clustering_output_dir": output_dir.as_posix()} - ) - - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method, params = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method", "params") - - assert ( - clustering_method in _supported_kilosort_versions - ), f'Clustering_method "{clustering_method}" is not supported' - - # add additional probe-recording and channels details into `params` - params = {**params, **ephys.get_recording_channels_details(key)} - params["fs"] = params["sample_rate"] - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = 
spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - spikeglx_recording.validate_file("ap") - run_CatGT = ( - params.get("run_CatGT", True) - and "_tcat." not in spikeglx_meta_filepath.stem - ) - - run_kilosort = kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=run_CatGT, - ) - run_kilosort.run_CatGT() - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort._modules = ["depth_estimation", "median_subtraction"] - run_kilosort.run_modules() - - self.insert1( - { - **key, - "params": params, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } - ) - - -@schema -class KilosortClustering(dj.Imported): - """A processing table to handle each clustering task.""" - - definition = """ - -> KilosortPreProcessing - --- - execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration - """ - - def make(self, key): - execution_time = datetime.utcnow() - - output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method") - - params = (KilosortPreProcessing & key).fetch1("params") - - if acq_software == "SpikeGLX": - spikeglx_meta_filepath = 
ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - spikeglx_recording.validate_file("ap") - - run_kilosort = kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=True, - ) - run_kilosort._modules = ["kilosort_helper"] - run_kilosort._CatGT_finished = True - run_kilosort.run_modules() - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort._modules = ["kilosort_helper"] - run_kilosort.run_modules() - - self.insert1( - { - **key, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } - ) - - -@schema -class KilosortPostProcessing(dj.Imported): - """A processing table to handle each clustering task.""" - - definition = """ - -> KilosortClustering - --- - modules_status: longblob # dictionary of summary status for all modules - execution_time: datetime # datetime of the start of this step - execution_duration: float # (hour) execution duration - """ - - def make(self, key): - execution_time = datetime.utcnow() - - output_dir = (ephys.ClusteringTask & key).fetch1("clustering_output_dir") - kilosort_dir = find_full_path(ephys.get_ephys_root_data_dir(), output_dir) - - acq_software, clustering_method = ( - ephys.ClusteringTask * ephys.EphysRecording * ephys.ClusteringParamSet & key - ).fetch1("acq_software", "clustering_method") - - params = (KilosortPreProcessing & key).fetch1("params") - - if 
acq_software == "SpikeGLX": - spikeglx_meta_filepath = ephys.get_spikeglx_meta_filepath(key) - spikeglx_recording = spikeglx.SpikeGLX(spikeglx_meta_filepath.parent) - spikeglx_recording.validate_file("ap") - - run_kilosort = kilosort_triggering.SGLXKilosortPipeline( - npx_input_dir=spikeglx_meta_filepath.parent, - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - run_CatGT=True, - ) - run_kilosort._modules = [ - "kilosort_postprocessing", - "noise_templates", - "mean_waveforms", - "quality_metrics", - ] - run_kilosort._CatGT_finished = True - run_kilosort.run_modules() - elif acq_software == "Open Ephys": - oe_probe = ephys.get_openephys_probe_data(key) - - assert len(oe_probe.recording_info["recording_files"]) == 1 - - # run kilosort - run_kilosort = kilosort_triggering.OpenEphysKilosortPipeline( - npx_input_dir=oe_probe.recording_info["recording_files"][0], - ks_output_dir=kilosort_dir, - params=params, - KS2ver=f'{Decimal(clustering_method.replace("kilosort", "")):.1f}', - ) - run_kilosort._modules = [ - "kilosort_postprocessing", - "noise_templates", - "mean_waveforms", - "quality_metrics", - ] - run_kilosort.run_modules() - - with open(run_kilosort._modules_input_hash_fp) as f: - modules_status = json.load(f) - - self.insert1( - { - **key, - "modules_status": modules_status, - "execution_time": execution_time, - "execution_duration": ( - datetime.utcnow() - execution_time - ).total_seconds() - / 3600, - } - ) - - # all finished, insert this `key` into ephys.Clustering - ephys.Clustering.insert1( - {**key, "clustering_time": datetime.utcnow()}, allow_direct_insert=True - ) diff --git a/element_array_ephys/readers/kilosort_triggering.py b/element_array_ephys/spike_sorting/kilosort_triggering.py similarity index 100% rename from element_array_ephys/readers/kilosort_triggering.py rename to element_array_ephys/spike_sorting/kilosort_triggering.py From c2bd5adb07096a04c7afc28418f1f996533fdf8b Mon Sep 
17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 10 Sep 2024 15:23:04 -0500 Subject: [PATCH 145/204] chore: clean up diagrams --- ...n.svg => attached_array_ephys_element.svg} | 0 images/attached_array_ephys_element_acute.svg | 451 --------------- .../attached_array_ephys_element_chronic.svg | 456 --------------- ...ttached_array_ephys_element_precluster.svg | 535 ------------------ 4 files changed, 1442 deletions(-) rename images/{attached_array_ephys_element_no_curation.svg => attached_array_ephys_element.svg} (100%) delete mode 100644 images/attached_array_ephys_element_acute.svg delete mode 100644 images/attached_array_ephys_element_chronic.svg delete mode 100644 images/attached_array_ephys_element_precluster.svg diff --git a/images/attached_array_ephys_element_no_curation.svg b/images/attached_array_ephys_element.svg similarity index 100% rename from images/attached_array_ephys_element_no_curation.svg rename to images/attached_array_ephys_element.svg diff --git a/images/attached_array_ephys_element_acute.svg b/images/attached_array_ephys_element_acute.svg deleted file mode 100644 index 5b2bc265..00000000 --- a/images/attached_array_ephys_element_acute.svg +++ /dev/null @@ -1,451 +0,0 @@ - - - - - -ephys.ProbeInsertion - - -ephys.ProbeInsertion - - - - - -ephys.InsertionLocation - - -ephys.InsertionLocation - - - - - -ephys.ProbeInsertion->ephys.InsertionLocation - - - - -ephys.EphysRecording - - -ephys.EphysRecording - - - - - -ephys.ProbeInsertion->ephys.EphysRecording - - - - -ephys.QualityMetrics - - -ephys.QualityMetrics - - - - - -ephys.QualityMetrics.Cluster - - -ephys.QualityMetrics.Cluster - - - - - -ephys.QualityMetrics->ephys.QualityMetrics.Cluster - - - - -ephys.QualityMetrics.Waveform - - -ephys.QualityMetrics.Waveform - - - - - -ephys.QualityMetrics->ephys.QualityMetrics.Waveform - - - - -probe.ElectrodeConfig - - -probe.ElectrodeConfig - - - - - -probe.ElectrodeConfig.Electrode - - -probe.ElectrodeConfig.Electrode - - - - - 
-probe.ElectrodeConfig->probe.ElectrodeConfig.Electrode - - - - -probe.ElectrodeConfig->ephys.EphysRecording - - - - -ephys.AcquisitionSoftware - - -ephys.AcquisitionSoftware - - - - - -ephys.AcquisitionSoftware->ephys.EphysRecording - - - - -SkullReference - - -SkullReference - - - - - -SkullReference->ephys.InsertionLocation - - - - -ephys.ClusteringParamSet - - -ephys.ClusteringParamSet - - - - - -ephys.ClusteringTask - - -ephys.ClusteringTask - - - - - -ephys.ClusteringParamSet->ephys.ClusteringTask - - - - -ephys.LFP.Electrode - - -ephys.LFP.Electrode - - - - - -ephys.ClusterQualityLabel - - -ephys.ClusterQualityLabel - - - - - -ephys.CuratedClustering.Unit - - -ephys.CuratedClustering.Unit - - - - - -ephys.ClusterQualityLabel->ephys.CuratedClustering.Unit - - - - -ephys.WaveformSet.Waveform - - -ephys.WaveformSet.Waveform - - - - - -ephys.Clustering - - -ephys.Clustering - - - - - -ephys.ClusteringTask->ephys.Clustering - - - - -probe.ProbeType - - -probe.ProbeType - - - - - -probe.ProbeType->probe.ElectrodeConfig - - - - -probe.Probe - - -probe.Probe - - - - - -probe.ProbeType->probe.Probe - - - - -probe.ProbeType.Electrode - - -probe.ProbeType.Electrode - - - - - -probe.ProbeType->probe.ProbeType.Electrode - - - - -ephys.Curation - - -ephys.Curation - - - - - -ephys.Clustering->ephys.Curation - - - - -ephys.LFP - - -ephys.LFP - - - - - -ephys.LFP->ephys.LFP.Electrode - - - - -probe.Probe->ephys.ProbeInsertion - - - - -ephys.CuratedClustering - - -ephys.CuratedClustering - - - - - -ephys.CuratedClustering->ephys.QualityMetrics - - - - -ephys.WaveformSet - - -ephys.WaveformSet - - - - - -ephys.CuratedClustering->ephys.WaveformSet - - - - -ephys.CuratedClustering->ephys.CuratedClustering.Unit - - - - -subject.Subject - - -subject.Subject - - - - - -session.Session - - -session.Session - - - - - -subject.Subject->session.Session - - - - -probe.ElectrodeConfig.Electrode->ephys.LFP.Electrode - - - - -probe.ElectrodeConfig.Electrode->ephys.WaveformSet.Waveform - - 
- - -probe.ElectrodeConfig.Electrode->ephys.CuratedClustering.Unit - - - - -ephys.Curation->ephys.CuratedClustering - - - - -ephys.ClusteringMethod - - -ephys.ClusteringMethod - - - - - -ephys.ClusteringMethod->ephys.ClusteringParamSet - - - - -ephys.WaveformSet.PeakWaveform - - -ephys.WaveformSet.PeakWaveform - - - - - -session.Session->ephys.ProbeInsertion - - - - -ephys.EphysRecording.EphysFile - - -ephys.EphysRecording.EphysFile - - - - - -ephys.WaveformSet->ephys.WaveformSet.Waveform - - - - -ephys.WaveformSet->ephys.WaveformSet.PeakWaveform - - - - -ephys.CuratedClustering.Unit->ephys.WaveformSet.Waveform - - - - -ephys.CuratedClustering.Unit->ephys.QualityMetrics.Cluster - - - - -ephys.CuratedClustering.Unit->ephys.QualityMetrics.Waveform - - - - -ephys.CuratedClustering.Unit->ephys.WaveformSet.PeakWaveform - - - - -ephys.EphysRecording->ephys.ClusteringTask - - - - -ephys.EphysRecording->ephys.LFP - - - - -ephys.EphysRecording->ephys.EphysRecording.EphysFile - - - - -probe.ProbeType.Electrode->probe.ElectrodeConfig.Electrode - - - - \ No newline at end of file diff --git a/images/attached_array_ephys_element_chronic.svg b/images/attached_array_ephys_element_chronic.svg deleted file mode 100644 index 808a2f17..00000000 --- a/images/attached_array_ephys_element_chronic.svg +++ /dev/null @@ -1,456 +0,0 @@ - - - - - -ephys.Curation - - -ephys.Curation - - - - - -ephys.CuratedClustering - - -ephys.CuratedClustering - - - - - -ephys.Curation->ephys.CuratedClustering - - - - -ephys.AcquisitionSoftware - - -ephys.AcquisitionSoftware - - - - - -ephys.EphysRecording - - -ephys.EphysRecording - - - - - -ephys.AcquisitionSoftware->ephys.EphysRecording - - - - -ephys.ProbeInsertion - - -ephys.ProbeInsertion - - - - - -ephys.ProbeInsertion->ephys.EphysRecording - - - - -ephys.InsertionLocation - - -ephys.InsertionLocation - - - - - -ephys.ProbeInsertion->ephys.InsertionLocation - - - - -subject.Subject - - -subject.Subject - - - - - -subject.Subject->ephys.ProbeInsertion 
- - - - -session.Session - - -session.Session - - - - - -subject.Subject->session.Session - - - - -ephys.WaveformSet.PeakWaveform - - -ephys.WaveformSet.PeakWaveform - - - - - -ephys.EphysRecording.EphysFile - - -ephys.EphysRecording.EphysFile - - - - - -ephys.EphysRecording->ephys.EphysRecording.EphysFile - - - - -ephys.ClusteringTask - - -ephys.ClusteringTask - - - - - -ephys.EphysRecording->ephys.ClusteringTask - - - - -ephys.LFP - - -ephys.LFP - - - - - -ephys.EphysRecording->ephys.LFP - - - - -probe.Probe - - -probe.Probe - - - - - -probe.Probe->ephys.ProbeInsertion - - - - -ephys.QualityMetrics - - -ephys.QualityMetrics - - - - - -ephys.QualityMetrics.Waveform - - -ephys.QualityMetrics.Waveform - - - - - -ephys.QualityMetrics->ephys.QualityMetrics.Waveform - - - - -ephys.QualityMetrics.Cluster - - -ephys.QualityMetrics.Cluster - - - - - -ephys.QualityMetrics->ephys.QualityMetrics.Cluster - - - - -ephys.ClusteringParamSet - - -ephys.ClusteringParamSet - - - - - -ephys.ClusteringParamSet->ephys.ClusteringTask - - - - -ephys.WaveformSet.Waveform - - -ephys.WaveformSet.Waveform - - - - - -probe.ProbeType - - -probe.ProbeType - - - - - -probe.ProbeType->probe.Probe - - - - -probe.ElectrodeConfig - - -probe.ElectrodeConfig - - - - - -probe.ProbeType->probe.ElectrodeConfig - - - - -probe.ProbeType.Electrode - - -probe.ProbeType.Electrode - - - - - -probe.ProbeType->probe.ProbeType.Electrode - - - - -ephys.Clustering - - -ephys.Clustering - - - - - -ephys.ClusteringTask->ephys.Clustering - - - - -ephys.LFP.Electrode - - -ephys.LFP.Electrode - - - - - -ephys.LFP->ephys.LFP.Electrode - - - - -session.Session->ephys.EphysRecording - - - - -ephys.Clustering->ephys.Curation - - - - -probe.ElectrodeConfig.Electrode - - -probe.ElectrodeConfig.Electrode - - - - - -probe.ElectrodeConfig.Electrode->ephys.WaveformSet.Waveform - - - - -probe.ElectrodeConfig.Electrode->ephys.LFP.Electrode - - - - -ephys.CuratedClustering.Unit - - -ephys.CuratedClustering.Unit - - - - - 
-probe.ElectrodeConfig.Electrode->ephys.CuratedClustering.Unit - - - - -ephys.WaveformSet - - -ephys.WaveformSet - - - - - -ephys.WaveformSet->ephys.WaveformSet.PeakWaveform - - - - -ephys.WaveformSet->ephys.WaveformSet.Waveform - - - - -probe.ElectrodeConfig->ephys.EphysRecording - - - - -probe.ElectrodeConfig->probe.ElectrodeConfig.Electrode - - - - -probe.ProbeType.Electrode->probe.ElectrodeConfig.Electrode - - - - -ephys.CuratedClustering.Unit->ephys.WaveformSet.PeakWaveform - - - - -ephys.CuratedClustering.Unit->ephys.WaveformSet.Waveform - - - - -ephys.CuratedClustering.Unit->ephys.QualityMetrics.Waveform - - - - -ephys.CuratedClustering.Unit->ephys.QualityMetrics.Cluster - - - - -ephys.ClusteringMethod - - -ephys.ClusteringMethod - - - - - -ephys.ClusteringMethod->ephys.ClusteringParamSet - - - - -ephys.CuratedClustering->ephys.QualityMetrics - - - - -ephys.CuratedClustering->ephys.WaveformSet - - - - -ephys.CuratedClustering->ephys.CuratedClustering.Unit - - - - -ephys.ClusterQualityLabel - - -ephys.ClusterQualityLabel - - - - - -ephys.ClusterQualityLabel->ephys.CuratedClustering.Unit - - - - -SkullReference - - -SkullReference - - - - - -SkullReference->ephys.InsertionLocation - - - - \ No newline at end of file diff --git a/images/attached_array_ephys_element_precluster.svg b/images/attached_array_ephys_element_precluster.svg deleted file mode 100644 index 7d854d2e..00000000 --- a/images/attached_array_ephys_element_precluster.svg +++ /dev/null @@ -1,535 +0,0 @@ - - - - - -ephys.AcquisitionSoftware - - -ephys.AcquisitionSoftware - - - - - -ephys.EphysRecording - - -ephys.EphysRecording - - - - - -ephys.AcquisitionSoftware->ephys.EphysRecording - - - - -ephys.QualityMetrics.Waveform - - -ephys.QualityMetrics.Waveform - - - - - -ephys.PreClusterTask - - -ephys.PreClusterTask - - - - - -ephys.EphysRecording->ephys.PreClusterTask - - - - -ephys.EphysRecording.EphysFile - - -ephys.EphysRecording.EphysFile - - - - - 
-ephys.EphysRecording->ephys.EphysRecording.EphysFile - - - - -ephys.PreCluster - - -ephys.PreCluster - - - - - -ephys.PreClusterTask->ephys.PreCluster - - - - -probe.ProbeType.Electrode - - -probe.ProbeType.Electrode - - - - - -probe.ElectrodeConfig.Electrode - - -probe.ElectrodeConfig.Electrode - - - - - -probe.ProbeType.Electrode->probe.ElectrodeConfig.Electrode - - - - -ephys.LFP - - -ephys.LFP - - - - - -ephys.PreCluster->ephys.LFP - - - - -ephys.ClusteringTask - - -ephys.ClusteringTask - - - - - -ephys.PreCluster->ephys.ClusteringTask - - - - -ephys.LFP.Electrode - - -ephys.LFP.Electrode - - - - - -probe.ElectrodeConfig.Electrode->ephys.LFP.Electrode - - - - -ephys.CuratedClustering.Unit - - -ephys.CuratedClustering.Unit - - - - - -probe.ElectrodeConfig.Electrode->ephys.CuratedClustering.Unit - - - - -ephys.WaveformSet.Waveform - - -ephys.WaveformSet.Waveform - - - - - -probe.ElectrodeConfig.Electrode->ephys.WaveformSet.Waveform - - - - -ephys.Curation - - -ephys.Curation - - - - - -ephys.CuratedClustering - - -ephys.CuratedClustering - - - - - -ephys.Curation->ephys.CuratedClustering - - - - -probe.ElectrodeConfig - - -probe.ElectrodeConfig - - - - - -probe.ElectrodeConfig->ephys.EphysRecording - - - - -probe.ElectrodeConfig->probe.ElectrodeConfig.Electrode - - - - -ephys.QualityMetrics - - -ephys.QualityMetrics - - - - - -ephys.CuratedClustering->ephys.QualityMetrics - - - - -ephys.WaveformSet - - -ephys.WaveformSet - - - - - -ephys.CuratedClustering->ephys.WaveformSet - - - - -ephys.CuratedClustering->ephys.CuratedClustering.Unit - - - - -ephys.InsertionLocation - - -ephys.InsertionLocation - - - - - -SkullReference - - -SkullReference - - - - - -SkullReference->ephys.InsertionLocation - - - - -ephys.QualityMetrics->ephys.QualityMetrics.Waveform - - - - -ephys.QualityMetrics.Cluster - - -ephys.QualityMetrics.Cluster - - - - - -ephys.QualityMetrics->ephys.QualityMetrics.Cluster - - - - -ephys.PreClusterParamSteps.Step - - -ephys.PreClusterParamSteps.Step - 
- - - - -ephys.ClusterQualityLabel - - -ephys.ClusterQualityLabel - - - - - -ephys.ClusterQualityLabel->ephys.CuratedClustering.Unit - - - - -session.Session - - -session.Session - - - - - -ephys.ProbeInsertion - - -ephys.ProbeInsertion - - - - - -session.Session->ephys.ProbeInsertion - - - - -ephys.ClusteringMethod - - -ephys.ClusteringMethod - - - - - -ephys.ClusteringParamSet - - -ephys.ClusteringParamSet - - - - - -ephys.ClusteringMethod->ephys.ClusteringParamSet - - - - -ephys.WaveformSet.PeakWaveform - - -ephys.WaveformSet.PeakWaveform - - - - - -ephys.WaveformSet->ephys.WaveformSet.PeakWaveform - - - - -ephys.WaveformSet->ephys.WaveformSet.Waveform - - - - -subject.Subject - - -subject.Subject - - - - - -subject.Subject->session.Session - - - - -ephys.LFP->ephys.LFP.Electrode - - - - -ephys.CuratedClustering.Unit->ephys.QualityMetrics.Waveform - - - - -ephys.CuratedClustering.Unit->ephys.QualityMetrics.Cluster - - - - -ephys.CuratedClustering.Unit->ephys.WaveformSet.PeakWaveform - - - - -ephys.CuratedClustering.Unit->ephys.WaveformSet.Waveform - - - - -ephys.Clustering - - -ephys.Clustering - - - - - -ephys.ClusteringTask->ephys.Clustering - - - - -probe.Probe - - -probe.Probe - - - - - -probe.Probe->ephys.ProbeInsertion - - - - -ephys.PreClusterMethod - - -ephys.PreClusterMethod - - - - - -ephys.PreClusterParamSet - - -ephys.PreClusterParamSet - - - - - -ephys.PreClusterMethod->ephys.PreClusterParamSet - - - - -ephys.ClusteringParamSet->ephys.ClusteringTask - - - - -probe.ProbeType - - -probe.ProbeType - - - - - -probe.ProbeType->probe.ProbeType.Electrode - - - - -probe.ProbeType->probe.ElectrodeConfig - - - - -probe.ProbeType->probe.Probe - - - - -ephys.ProbeInsertion->ephys.EphysRecording - - - - -ephys.ProbeInsertion->ephys.InsertionLocation - - - - -ephys.PreClusterParamSteps - - -ephys.PreClusterParamSteps - - - - - -ephys.PreClusterParamSteps->ephys.PreClusterTask - - - - -ephys.PreClusterParamSteps->ephys.PreClusterParamSteps.Step - - - - 
-ephys.Clustering->ephys.Curation - - - - -ephys.PreClusterParamSet->ephys.PreClusterParamSteps.Step - - - - \ No newline at end of file From 497110816058ae0655cac8f9414b4622905f78a6 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 19 Sep 2024 13:29:45 -0500 Subject: [PATCH 146/204] fix: use tempfile.TemporaryDirectory --- element_array_ephys/ephys_report.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/element_array_ephys/ephys_report.py b/element_array_ephys/ephys_report.py index c962d33d..0c6836a0 100644 --- a/element_array_ephys/ephys_report.py +++ b/element_array_ephys/ephys_report.py @@ -2,6 +2,7 @@ import datetime import pathlib +import tempfile from uuid import UUID import datajoint as dj @@ -53,7 +54,7 @@ class ProbeLevelReport(dj.Computed): def make(self, key): from .plotting.probe_level import plot_driftmap - save_dir = _make_save_dir() + save_dir = tempfile.TemporaryDirectory() units = ephys.CuratedClustering.Unit & key & "cluster_quality_label='good'" @@ -88,13 +89,15 @@ def make(self, key): fig_dict = _save_figs( figs=(fig,), fig_names=("drift_map_plot",), - save_dir=save_dir, + save_dir=save_dir.name, fig_prefix=fig_prefix, extension=".png", ) self.insert1({**key, **fig_dict, "shank": shank_no}) + save_dir.cleanup() + @schema class UnitLevelReport(dj.Computed): @@ -266,17 +269,10 @@ def make(self, key): ) -def _make_save_dir(root_dir: pathlib.Path = None) -> pathlib.Path: - if root_dir is None: - root_dir = pathlib.Path().absolute() - save_dir = root_dir / "temp_ephys_figures" - save_dir.mkdir(parents=True, exist_ok=True) - return save_dir - - def _save_figs( figs, fig_names, save_dir, fig_prefix, extension=".png" ) -> dict[str, pathlib.Path]: + save_dir = pathlib.Path(save_dir) fig_dict = {} for fig, fig_name in zip(figs, fig_names): fig_filepath = save_dir / (fig_prefix + "_" + fig_name + extension) From 63df4cda7d5ab97d1d52195acf0d7031fe2496f6 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 19 Sep 
2024 16:27:13 -0500 Subject: [PATCH 147/204] format: black --- element_array_ephys/ephys.py | 9 +++++++-- element_array_ephys/spike_sorting/si_spike_sorting.py | 8 ++++++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index 02e1366e..ad9bb8d7 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -1068,9 +1068,14 @@ def make(self, key): } spike_locations = sorting_analyzer.get_extension("spike_locations") - extremum_channel_inds = si.template_tools.get_template_extremum_channel(sorting_analyzer, outputs="index") + extremum_channel_inds = si.template_tools.get_template_extremum_channel( + sorting_analyzer, outputs="index" + ) spikes_df = pd.DataFrame( - sorting_analyzer.sorting.to_spike_vector(extremum_channel_inds=extremum_channel_inds)) + sorting_analyzer.sorting.to_spike_vector( + extremum_channel_inds=extremum_channel_inds + ) + ) units = [] for unit_idx, unit_id in enumerate(si_sorting.unit_ids): diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 547fd8ce..e2f011e1 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -114,7 +114,9 @@ def make(self, key): spikeglx_recording.validate_file("ap") data_dir = spikeglx_meta_filepath.parent - si_extractor = si.extractors.neoextractors.spikeglx.SpikeGLXRecordingExtractor + si_extractor = ( + si.extractors.neoextractors.spikeglx.SpikeGLXRecordingExtractor + ) stream_names, stream_ids = si.extractors.get_neo_streams( "spikeglx", folder_path=data_dir ) @@ -125,7 +127,9 @@ def make(self, key): oe_probe = ephys.get_openephys_probe_data(key) assert len(oe_probe.recording_info["recording_files"]) == 1 data_dir = oe_probe.recording_info["recording_files"][0] - si_extractor = si.extractors.neoextractors.openephys.OpenEphysBinaryRecordingExtractor + si_extractor = ( + 
si.extractors.neoextractors.openephys.OpenEphysBinaryRecordingExtractor + ) stream_names, stream_ids = si.extractors.get_neo_streams( "openephysbinary", folder_path=data_dir From 2d57102880d872cf1a4ec037eee5892a87536ff2 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 19 Sep 2024 16:56:01 -0500 Subject: [PATCH 148/204] fix(spikeglx): minor bugfix in reading probe model --- element_array_ephys/readers/spikeglx.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/readers/spikeglx.py b/element_array_ephys/readers/spikeglx.py index 3e5f7a36..ca60648d 100644 --- a/element_array_ephys/readers/spikeglx.py +++ b/element_array_ephys/readers/spikeglx.py @@ -266,7 +266,7 @@ def __init__(self, meta_filepath): self.probe_PN = self.meta.get("imDatPrb_pn", "3A") # Infer npx probe model (e.g. 1.0 (3A, 3B) or 2.0) - probe_model = self.meta.get("imDatPrb_type", 1) + probe_model = self.meta.get("imDatPrb_type", 0) if probe_model < 1: if "typeEnabled" in self.meta and self.probe_PN == "3A": self.probe_model = "neuropixels 1.0 - 3A" From 800d060e83cac12bc87adcaf0061da5100263932 Mon Sep 17 00:00:00 2001 From: kushalbakshi Date: Fri, 20 Sep 2024 18:44:21 -0400 Subject: [PATCH 149/204] Update docs for new ephys release --- docs/docker-compose.yaml | 6 - docs/mkdocs.yaml | 13 +- docs/src/concepts.md | 32 ++--- docs/src/index.md | 32 ++--- docs/src/tutorials/index.md | 40 ++----- notebooks/demo_prepare.ipynb | 224 ----------------------------------- notebooks/demo_run.ipynb | 107 ----------------- 7 files changed, 36 insertions(+), 418 deletions(-) delete mode 100644 notebooks/demo_prepare.ipynb delete mode 100644 notebooks/demo_run.ipynb diff --git a/docs/docker-compose.yaml b/docs/docker-compose.yaml index 5ba221df..bc2c2b8b 100644 --- a/docs/docker-compose.yaml +++ b/docs/docker-compose.yaml @@ -30,12 +30,6 @@ services: export ELEMENT_UNDERSCORE=$$(echo $${PACKAGE} | sed 's/element_//g') export ELEMENT_HYPHEN=$$(echo $${ELEMENT_UNDERSCORE} | sed 
's/_/-/g') export PATCH_VERSION=$$(cat /main/$${PACKAGE}/version.py | grep -oE '\d+\.\d+\.[a-z0-9]+') - git clone https://github.com/datajoint/workflow-$${ELEMENT_HYPHEN}.git /main/delete || true - if [ -d /main/delete/ ]; then - mv /main/delete/workflow_$${ELEMENT_UNDERSCORE} /main/ - mv /main/delete/notebooks/*ipynb /main/docs/src/tutorials/ - rm -fR /main/delete - fi if echo "$${MODE}" | grep -i live &>/dev/null; then mkdocs serve --config-file ./docs/mkdocs.yaml -a 0.0.0.0:80 2>&1 | tee docs/temp_mkdocs.log elif echo "$${MODE}" | grep -iE "qa|push" &>/dev/null; then diff --git a/docs/mkdocs.yaml b/docs/mkdocs.yaml index 5fdbffd2..e211069a 100644 --- a/docs/mkdocs.yaml +++ b/docs/mkdocs.yaml @@ -9,18 +9,7 @@ nav: - Concepts: concepts.md - Tutorials: - Overview: tutorials/index.md - - Data Download: tutorials/00-data-download-optional.ipynb - - Configure: tutorials/01-configure.ipynb - - Workflow Structure: tutorials/02-workflow-structure-optional.ipynb - - Process: tutorials/03-process.ipynb - - Automate: tutorials/04-automate-optional.ipynb - - Explore: tutorials/05-explore.ipynb - - Drop: tutorials/06-drop-optional.ipynb - - Downstream Analysis: tutorials/07-downstream-analysis.ipynb - - Visualizations: tutorials/10-data_visualization.ipynb - - Electrode Localization: tutorials/08-electrode-localization.ipynb - - NWB Export: tutorials/09-NWB-export.ipynb - - Quality Metrics: tutorials/quality_metrics.ipynb + - Tutorial: tutorials/tutorial.ipynb - Citation: citation.md - API: api/ # defer to gen-files + literate-nav - Changelog: changelog.md diff --git a/docs/src/concepts.md b/docs/src/concepts.md index f864b306..cb57a802 100644 --- a/docs/src/concepts.md +++ b/docs/src/concepts.md @@ -59,12 +59,16 @@ significant community uptake: Kilosort provides most automation and has gained significant popularity, being adopted as one of the key spike sorting methods in the majority of the teams/collaborations we have worked with. 
As part of our Year-1 NIH U24 effort, we provide support for data -ingestion of spike sorting results from Kilosort. Further effort will be devoted for the +ingestion of spike sorting results from Kilosort. + +Further effort has been devoted for the ingestion support of other spike sorting methods. On this end, a framework for unifying existing spike sorting methods, named [SpikeInterface](https://github.com/SpikeInterface/spikeinterface), has been developed by Alessio Buccino, et al. SpikeInterface provides a convenient Python-based wrapper to -invoke, extract, compare spike sorting results from different sorting algorithms. +invoke, extract, compare spike sorting results from different sorting algorithms. +SpikeInterface is the primary tool supported by Element Array Electrophysiology for +spike sorting as of version `1.0.0`. ## Key Partnerships @@ -95,22 +99,10 @@ Each of the DataJoint Elements creates a set of tables for common neuroscience d modalities to organize, preprocess, and analyze data. Each node in the following diagram is a table within the Element or a table connected to the Element. -### `ephys_acute` module +### `ephys` module ![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_acute.svg) -### `ephys_chronic` module - -![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_chronic.svg) - -### `ephys_precluster` module - -![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_precluster.svg) - -### `ephys_no_curation` module - -![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/attached_array_ephys_element_no_curation.svg) - ### `subject` schema ([API docs](https://datajoint.com/docs/elements/element-animal/api/element_animal/subject)) Although not required, most choose to connect the `Session` table to a `Subject` table. 
@@ -181,12 +173,11 @@ Major features of the Array Electrophysiology Element include: + Probe-insertion, ephys-recordings, LFP extraction, clusterings, curations, sorted units and the associated data (e.g. spikes, waveforms, etc.). - + Store/track/manage different curations of the spike sorting results - supporting - both curated clustering and kilosort triggered clustering (i.e., `no_curation`). + + Store/track/manage the spike sorting results. + Ingestion support for data acquired with SpikeGLX and OpenEphys acquisition systems. -+ Ingestion support for spike sorting outputs from Kilosort. -+ Triggering support for workflow integrated Kilosort processing. ++ Ingestion support for spike sorting outputs from SpikeInterface. ++ Triggering support for workflow integrated SpikeInterface processing. + Sample data and complete test suite for quality assurance. ## Data Export and Publishing @@ -208,8 +199,7 @@ pip install element-array-ephys[nwb] ## Roadmap -Incorporation of SpikeInterface into the Array Electrophysiology Element will be -on DataJoint Elements development roadmap. Dr. Loren Frank has led a development +Dr. Loren Frank has led a development effort of a DataJoint pipeline with SpikeInterface framework and NeurodataWithoutBorders format integrated [https://github.com/LorenFrankLab/nwb_datajoint](https://github.com/LorenFrankLab/nwb_datajoint). diff --git a/docs/src/index.md b/docs/src/index.md index b21edcfc..0c828c00 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -1,29 +1,23 @@ # Element Array Electrophysiology This Element features DataJoint schemas for analyzing extracellular array -electrophysiology data acquired with Neuropixels probes and spike sorted using Kilosort -spike sorter. Each Element is a modular pipeline for data storage and processing with +electrophysiology data acquired with Neuropixels probes and spike sorted using [SpikeInterface](https://github.com/SpikeInterface/spikeinterface). 
+Each Element is a modular pipeline for data storage and processing with corresponding database tables that can be combined with other Elements to assemble a fully functional pipeline. ![diagram](https://raw.githubusercontent.com/datajoint/element-array-ephys/main/images/diagram_flowchart.svg) -The Element is comprised of `probe` and `ephys` schemas. Several `ephys` schemas are -developed to handle various use cases of this pipeline and workflow: - -+ `ephys_acute`: A probe is inserted into a new location during each session. - -+ `ephys_chronic`: A probe is inserted once and used to record across multiple - sessions. - -+ `ephys_precluster`: A probe is inserted into a new location during each session. - Pre-clustering steps are performed on the data from each probe prior to Kilosort - analysis. - -+ `ephys_no_curation`: A probe is inserted into a new location during each session and - Kilosort-triggered clustering is performed without the option to manually curate the - results. - -Visit the [Concepts page](./concepts.md) for more information about the use cases of +The Element is comprised of `probe` and `ephys` schemas. Visit the +[Concepts page](./concepts.md) for more information about the `probe` and `ephys` schemas and an explanation of the tables. To get started with building your own data pipeline, visit the [Tutorials page](./tutorials/index.md). + +Prior to version `1.0.0` , several `ephys` schemas were +developed and supported to handle various use cases of this pipeline and workflow. 
These
+ are now deprecated but still available on their own branch within the repository:
+
+* [`ephys_acute`](https://github.com/datajoint/element-array-ephys/tree/main_ephys_acute)
+* [`ephys_chronic`](https://github.com/datajoint/element-array-ephys/tree/main_ephys_chronic)
+* [`ephys_precluster`](https://github.com/datajoint/element-array-ephys/tree/main_ephys_precluster)
+* [`ephys_no_curation`](https://github.com/datajoint/element-array-ephys/tree/main_ephys_no_curation)
diff --git a/docs/src/tutorials/index.md b/docs/src/tutorials/index.md
index 5f367cd9..ff0bd1f5 100644
--- a/docs/src/tutorials/index.md
+++ b/docs/src/tutorials/index.md
@@ -1,14 +1,18 @@
 # Tutorials
 
+## Executing the Tutorial Notebooks
+
+The tutorials are set up to run using GitHub Codespaces. To run the tutorials, click on
+the "Open in Codespaces" button from the GitHub repository. This will open a
+pre-configured environment with a VSCode IDE in your browser. The environment contains
+all the necessary dependencies and sample data to run the tutorials.
+
 ## Installation
 
 Installation of the Element requires an integrated development environment and database.
 Instructions to setup each of the components can be found on the
-[User Instructions](https://datajoint.com/docs/elements/user-guide/) page. These
-instructions use the example
-[workflow for Element Array Ephys](https://github.com/datajoint/workflow-array-ephys),
-which can be modified for a user's specific experimental requirements. This example
-workflow uses several Elements (Lab, Animal, Session, Event, and Electrophysiology) to construct
+[User Instructions](https://datajoint.com/docs/elements/user-guide/) page. The example
+tutorial uses several Elements (Lab, Animal, Session, Event, and Electrophysiology) to construct
 a complete pipeline, and is able to ingest experimental metadata and run model training
 and inference.
 
@@ -23,32 +27,10 @@ Electrophysiology.
### Notebooks Each of the notebooks in the workflow -([download here](https://github.com/datajoint/workflow-array-ephys/tree/main/notebooks) +([download here](https://github.com/datajoint/workflow-array-ephys/tree/main/notebooks)) steps through ways to interact with the Element itself. For convenience, these notebooks are also rendered as part of this site. To try out the Elements notebooks in an online Jupyter environment with access to example data, visit [CodeBook](https://codebook.datajoint.io/). (Electrophysiology notebooks coming soon!) -- [Data Download](./00-data-download-optional.ipynb) highlights how to use DataJoint - tools to download a sample model for trying out the Element. -- [Configure](./01-configure.ipynb) helps configure your local DataJoint installation to - point to the correct database. -- [Workflow Structure](./02-workflow-structure-optional.ipynb) demonstrates the table - architecture of the Element and key DataJoint basics for interacting with these - tables. -- [Process](./03-process.ipynb) steps through adding data to these tables and launching - key Electrophysiology features, like model training. -- [Automate](./04-automate-optional.ipynb) highlights the same steps as above, but - utilizing all built-in automation tools. -- [Explore](./05-explore.ipynb) demonstrates how to fetch data from the Element. -- [Drop schemas](./06-drop-optional.ipynb) provides the steps for dropping all the - tables to start fresh. -- [Downstream Analysis](./07-downstream-analysis.ipynb) highlights how to link - this Element to Element Event for event-based analyses. -- [Visualizations](./10-data_visualization.ipynb) highlights how to use a built-in module - for visualizing units, probes and quality metrics. -- [Electrode Localization](./08-electrode-localization.ipynb) demonstrates how to link - this Element to - [Element Electrode Localization](https://datajoint.com/docs/elements/element-electrode-localization/). 
-- [NWB Export](./09-NWB-export.ipynb) highlights the export functionality available for the - `no-curation` schema. +- [Tutorial](../../../notebooks/tutorial.ipynb) diff --git a/notebooks/demo_prepare.ipynb b/notebooks/demo_prepare.ipynb deleted file mode 100644 index 85ee1be2..00000000 --- a/notebooks/demo_prepare.ipynb +++ /dev/null @@ -1,224 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Demo Preparation Notebook\n", - "\n", - "**Please Note**: This notebook (`demo_prepare.ipynb`) and `demo_run.ipynb` are **NOT** intended to be used as learning materials. To gain\n", - "a thorough understanding of the DataJoint workflow for extracellular electrophysiology, please\n", - "see the [`tutorial`](./tutorial.ipynb) notebook." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Runs in about 45s\n", - "import datajoint as dj\n", - "import datetime\n", - "from tutorial_pipeline import subject, session, probe, ephys\n", - "from element_array_ephys import ephys_report" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "subject.Subject.insert1(\n", - " dict(subject=\"subject5\", subject_birth_date=\"2023-01-01\", sex=\"U\")\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "session_key = dict(subject=\"subject5\", session_datetime=\"2023-01-01 00:00:00\")\n", - "\n", - "session.Session.insert1(session_key)\n", - "\n", - "session.SessionDirectory.insert1(dict(session_key, session_dir=\"raw/subject5/session1\"))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "probe.Probe.insert1(dict(probe=\"714000838\", probe_type=\"neuropixels 1.0 - 3B\"))\n", - "\n", - "ephys.ProbeInsertion.insert1(\n", - " dict(\n", - " session_key,\n", - " insertion_number=1,\n", - " 
probe=\"714000838\",\n", - " )\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "populate_settings = {\"display_progress\": True}\n", - "\n", - "ephys.EphysRecording.populate(**populate_settings)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "kilosort_params = {\n", - " \"fs\": 30000,\n", - " \"fshigh\": 150,\n", - " \"minfr_goodchannels\": 0.1,\n", - " \"Th\": [10, 4],\n", - " \"lam\": 10,\n", - " \"AUCsplit\": 0.9,\n", - " \"minFR\": 0.02,\n", - " \"momentum\": [20, 400],\n", - " \"sigmaMask\": 30,\n", - " \"ThPr\": 8,\n", - " \"spkTh\": -6,\n", - " \"reorder\": 1,\n", - " \"nskip\": 25,\n", - " \"GPU\": 1,\n", - " \"Nfilt\": 1024,\n", - " \"nfilt_factor\": 4,\n", - " \"ntbuff\": 64,\n", - " \"whiteningRange\": 32,\n", - " \"nSkipCov\": 25,\n", - " \"scaleproc\": 200,\n", - " \"nPCs\": 3,\n", - " \"useRAM\": 0,\n", - "}\n", - "\n", - "ephys.ClusteringParamSet.insert_new_params(\n", - " clustering_method=\"kilosort2\",\n", - " paramset_idx=1,\n", - " params=kilosort_params,\n", - " paramset_desc=\"Spike sorting using Kilosort2\",\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "ephys.ClusteringTask.insert1(\n", - " dict(\n", - " session_key,\n", - " insertion_number=1,\n", - " paramset_idx=1,\n", - " task_mode=\"load\", # load or trigger\n", - " clustering_output_dir=\"processed/subject5/session1/probe_1/kilosort2-5_1\",\n", - " )\n", - ")\n", - "\n", - "ephys.Clustering.populate(**populate_settings)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "clustering_key = (ephys.ClusteringTask & session_key).fetch1(\"KEY\")\n", - "ephys.Curation().create1_from_clustering_task(clustering_key)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], 
- "source": [ - "# Runs in about 12m\n", - "ephys.CuratedClustering.populate(**populate_settings)\n", - "ephys.WaveformSet.populate(**populate_settings)\n", - "ephys_report.ProbeLevelReport.populate(**populate_settings)\n", - "ephys_report.UnitLevelReport.populate(**populate_settings)" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Drop schemas\n", - "- Schemas are not typically dropped in a production workflow with real data in it.\n", - "- At the developmental phase, it might be required for the table redesign.\n", - "- When dropping all schemas is needed, the following is the dependency order." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "def drop_databases(databases):\n", - " import pymysql.err\n", - "\n", - " conn = dj.conn()\n", - "\n", - " with dj.config(safemode=False):\n", - " for database in databases:\n", - " schema = dj.Schema(f'{dj.config[\"custom\"][\"database.prefix\"]}{database}')\n", - " while schema.list_tables():\n", - " for table in schema.list_tables():\n", - " try:\n", - " conn.query(f\"DROP TABLE `{schema.database}`.`{table}`\")\n", - " except pymysql.err.OperationalError:\n", - " print(f\"Can't drop `{schema.database}`.`{table}`. 
Retrying...\")\n", - " schema.drop()\n", - "\n", - "\n", - "# drop_databases(databases=['analysis', 'trial', 'event', 'ephys_report', 'ephys', 'probe', 'session', 'subject', 'project', 'lab'])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.17" - }, - "vscode": { - "interpreter": { - "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/notebooks/demo_run.ipynb b/notebooks/demo_run.ipynb deleted file mode 100644 index 70fbb746..00000000 --- a/notebooks/demo_run.ipynb +++ /dev/null @@ -1,107 +0,0 @@ -{ - "cells": [ - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# DataJoint Workflow for Neuropixels Analysis\n", - "\n", - "+ This notebook demonstrates using the open-source DataJoint Element to build a workflow for extracellular electrophysiology.\n", - "+ For a detailed tutorial, please see the [tutorial notebook](./tutorial.ipynb)." 
- ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Import dependencies" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import datajoint as dj\n", - "from tutorial_pipeline import subject, session, probe, ephys\n", - "from element_array_ephys.plotting.widget import main" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### View workflow" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "(\n", - " dj.Diagram(subject.Subject)\n", - " + dj.Diagram(session.Session)\n", - " + dj.Diagram(probe)\n", - " + dj.Diagram(ephys)\n", - ")" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Visualize processed data" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "main(ephys)" - ] - }, - { - "attachments": {}, - "cell_type": "markdown", - "metadata": {}, - "source": [ - "For an in-depth tutorial please see the [tutorial notebook](./tutorial.ipynb)." 
- ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "python3p10", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.17" - }, - "vscode": { - "interpreter": { - "hash": "ff52d424e56dd643d8b2ec122f40a2e279e94970100b4e6430cb9025a65ba4cf" - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} From fc8ac1d8159e07714f1041bca67fa3546451f1a0 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Thu, 26 Sep 2024 18:57:02 +0200 Subject: [PATCH 150/204] chore(.github): add new GitHub Action callers for devcontainer, mkdocs, and semantic release --- .../workflows/devcontainer-build-publish-caller.yml | 10 ++++++++++ .github/workflows/mkdocs-release-caller.yml | 9 +++++++++ .github/workflows/semantic-release-caller.yml | 10 ++++++++++ 3 files changed, 29 insertions(+) create mode 100644 .github/workflows/devcontainer-build-publish-caller.yml create mode 100644 .github/workflows/mkdocs-release-caller.yml create mode 100644 .github/workflows/semantic-release-caller.yml diff --git a/.github/workflows/devcontainer-build-publish-caller.yml b/.github/workflows/devcontainer-build-publish-caller.yml new file mode 100644 index 00000000..2745efea --- /dev/null +++ b/.github/workflows/devcontainer-build-publish-caller.yml @@ -0,0 +1,10 @@ +name: devcontainer_build_publish +on: + workflow_dispatch: + +jobs: + call_devcontainer_build_publish: + uses: datajoint/.github/.github/workflows/devcontainer-build-publish.yaml@main + secrets: + DOCKERHUB_USERNAME: ${{secrets.DOCKERHUB_USERNAME}} + DOCKERHUB_TOKEN: ${{secrets.DOCKERHUB_TOKEN_FOR_ELEMENTS}} diff --git a/.github/workflows/mkdocs-release-caller.yml b/.github/workflows/mkdocs-release-caller.yml new file mode 100644 index 00000000..e25b0184 --- /dev/null +++ 
b/.github/workflows/mkdocs-release-caller.yml @@ -0,0 +1,9 @@ +name: mkdocs-release +on: + workflow_dispatch: + +jobs: + mkdocs_release: + uses: datajoint/.github/.github/workflows/mkdocs_release.yaml@main + permissions: + contents: write diff --git a/.github/workflows/semantic-release-caller.yml b/.github/workflows/semantic-release-caller.yml new file mode 100644 index 00000000..bfd7643d --- /dev/null +++ b/.github/workflows/semantic-release-caller.yml @@ -0,0 +1,10 @@ +name: semantic-release +on: + workflow_dispatch: + +jobs: + call_semantic_release: + uses: datajoint/.github/.github/workflows/semantic-release.yaml@main + secrets: + APP_ID: ${{ secrets.ELEMENT_APP_ID }} + GET_TOKEN_KEY: ${{ secrets.ELEMENT_GET_TOKEN_KEY }} From 6bad0011d63bbf5159794af7472a16943ec5298f Mon Sep 17 00:00:00 2001 From: semantic-release Date: Tue, 1 Oct 2024 13:45:43 +0000 Subject: [PATCH 151/204] 0.3.6 Automatically generated by python-semantic-release --- CHANGELOG.md | 2658 ++++++++++++++++++++++++++++++-- element_array_ephys/version.py | 2 +- 2 files changed, 2532 insertions(+), 128 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d2e48eaa..54926bd3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,180 +1,2584 @@ -# Changelog +# CHANGELOG -Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and - [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
+## v0.3.6 (2024-10-01) -## [0.3.5] - 2024-08-16 +### Chore -+ Fix - Improve `spikeglx` loader in extracting neuropixels probe type from the meta file -+ Update - Explicit call to `probe.create_neuropixels_probe_types()` to create entries in ProbeType +* chore(.github): add new GitHub Action callers for devcontainer, mkdocs, and semantic release ([`fc8ac1d`](https://github.com/datajoint/element-array-ephys/commit/fc8ac1d8159e07714f1041bca67fa3546451f1a0)) +### Fix -## [0.3.4] - 2024-03-22 +* fix(spikeglx): minor bugfix in reading probe model ([`2d57102`](https://github.com/datajoint/element-array-ephys/commit/2d57102880d872cf1a4ec037eee5892a87536ff2)) -+ Add - pytest -+ Update - Ephys schema changed from `ephys_acute` to `ephys_no_curation` in `tutorial.ipynb` +### Unknown +* Merge pull request #205 from MilagrosMarin/chore/update-gha -## [0.3.3] - 2024-01-24 -+ Update - remove PyPi release from `release.yml` since it will fail after the new `setup.py` +chore(.github): add new GitHub Action callers ([`d091ffc`](https://github.com/datajoint/element-array-ephys/commit/d091ffc86b6818fdcf16bfdafaa4f8829d771e7b)) -## [0.3.2] - 2024-01-12 -+ Fix - `probe_geometry` bugfix for incorrect handling of probes with staggered electrode positions +* Merge pull request #202 from ttngu207/main -## [0.3.1] - 2023-11-28 -+ Update - Flowchart borders for consistency with other DataJoint Elements -+ Fix - `dj.config()` setup moved to `tutorial_pipeline.py` instead of `__init__.py` -+ Update - Elements installed directly from GitHub instead of PyPI -+ Update - Structure of the tutorial notebook +fix(spikeglx): minor bugfix in reading npx probe model of older versions ([`780352b`](https://github.com/datajoint/element-array-ephys/commit/780352b51c3d002d6748bd54f61bd79169e07d95)) -## [0.3.0] - 2023-10-25 +## v0.3.5 (2024-08-19) -+ Add - DevContainer for codespaces -+ Add - `tutorial_pipeline.py` -+ Add - 60 min tutorial using Jupyter Notebooks, short demo notebook -+ Update - General 
improvements to increase consistency with other DataJoint Elements +### Fix -## [0.2.11] - 2023-06-29 +* fix(spikeglx): minor bugfix ([`6764f8c`](https://github.com/datajoint/element-array-ephys/commit/6764f8c1adb9a80569f75233028e551cf58d8917)) -+ Update - Improve kilosort triggering routine - better logging, remove temporary files, robust resumable processing -+ Add - Null value for `package_version` to patch bug -+ Update - GitHub Actions workflows -+ Update - README instructions +* fix: minor bugfix ([`e8870b9`](https://github.com/datajoint/element-array-ephys/commit/e8870b94cf6dc09b251e268c4102fb4b82149da2)) -## [0.2.10] - 2023-05-26 +* fix(probe): better handling of different Neuropixels probe types ([`aaec763`](https://github.com/datajoint/element-array-ephys/commit/aaec76339954b17a2dbef8aeaa84e92e64bdad35)) -+ Add - Kilosort, NWB, and DANDI citations -+ Fix - CSS to improve readability of tables in dark mode -+ Update - mkdocs.yaml +* fix(probe_geometry): bugfix in x_coords for probe with staggered electrode positions ([`54d4fac`](https://github.com/datajoint/element-array-ephys/commit/54d4facd38a79ba9b6e40c01174fdb04e6dee43d)) -## [0.2.9] - 2023-05-11 +### Unknown -+ Fix - `.ipynb` dark mode output for all notebooks. +* Merge pull request #199 from ttngu207/main -## [0.2.8] - 2023-04-28 +fix(probe): better handling of different Neuropixels probe types and SpikeGLX meta loader ([`71d9a42`](https://github.com/datajoint/element-array-ephys/commit/71d9a42b28280d42b021b2e42d492f4918f07cd2)) -+ Fix - `.ipynb` output in tutorials is not visible in dark mode. 
+* update: version and changelog ([`f754392`](https://github.com/datajoint/element-array-ephys/commit/f75439241693f82e75927d23637a4ae471dd6377)) -## [0.2.7] - 2023-04-19 +* rearrange: explicitly call `probe.create_neuropixels_probe_types()` to create entries in ProbeType ([`46679e6`](https://github.com/datajoint/element-array-ephys/commit/46679e605e116e13a2cc373148ea24127a2fc447)) -+ Bugfix - A name remapping dictionary was added to ensure consistency between the column names of the `metrics.csv` file and the attribute names of the `QualityMetrics` table +* Merge branch 'dev_separated_create_probe' into nei_nienborg ([`7f52f59`](https://github.com/datajoint/element-array-ephys/commit/7f52f594ac4d24b2210cc3e2bee4adf0f4c3c913)) -## [0.2.6] - 2023-04-17 +* Merge pull request #198 from BrainCOGS/adding-raw-strings-and-package-minimum -+ Fix - Update Pandas DataFrame column name to insert `pt_ratio` in `QualityMetrics.Waveform` table +Fix regex patterns and add minimum version for scikit-image ([`27c56ea`](https://github.com/datajoint/element-array-ephys/commit/27c56ea92ba0c5d089a2c1e77cbffb52d51dcf6c)) -## [0.2.5] - 2023-04-12 +* Added minimum version to the setup.py for scikit-image ([`711dd48`](https://github.com/datajoint/element-array-ephys/commit/711dd48a5396d1e7ba36410c5f141be6940e9c11)) -+ Add - docstrings for quality metric tables -+ Fix - docstring errors -+ Update - `concepts.md` -+ Update - schema diagrams with quality metrics tables +* Provided raw annotation for strings with unsupported escape regex sequences ([`f59b4ab`](https://github.com/datajoint/element-array-ephys/commit/f59b4abf3f1dae42990ef02cd3ff1e6c341aa861)) -## [0.2.4] - 2023-03-10 +* Merge pull request #185 from datajoint/pytest -+ Update - Requirements with `ipywidgets` and `scikit-image` for plotting widget +Add pytest ([`9299142`](https://github.com/datajoint/element-array-ephys/commit/9299142605c4b16c14edfe9a44f40f242f25839a)) -## [0.2.3] - 2023-02-14 +* apply black formatting 
([`333f411`](https://github.com/datajoint/element-array-ephys/commit/333f4118ecbf3eee348fa3671b7da3249302167b)) -+ Add - extras_require install options for nwb and development requirement sets -+ Add - mkdocs notebook rendering -+ Add - markdown linting and spellcheck config files, with implementation edits -+ Update - license for 2023 -+ Update - blackify previous updates +* update CHANGELOG.md & bump version ([`a3426eb`](https://github.com/datajoint/element-array-ephys/commit/a3426ebe9d9b03f61bef231c17032d5ad2e5c8cd)) -## [0.2.2] - 2022-01-11 +* tested version of pytest suite ([`9c033c4`](https://github.com/datajoint/element-array-ephys/commit/9c033c4f355a831dabb8537cfb12dba76c8badab)) -+ Bugfix - Revert import order in `__init__.py` to avoid circular import error. -+ Update - `.pre-commit-config.yaml` to disable automatic positioning of import - statement at the top. -+ Bugfix - Update docstrings to render API for documentation website. +* switch ephys to no_curation in tutorial notebook ([`3ae2cbc`](https://github.com/datajoint/element-array-ephys/commit/3ae2cbcc558f17968403e00c983eca1049e51721)) -## [0.2.1] - 2022-01-06 +* move tutorial_pipeline.py to tests ([`591a0ed`](https://github.com/datajoint/element-array-ephys/commit/591a0ed0857601517a62acd73967c726269e5eb2)) -+ Add - `build_electrode_layouts` function in `probe.py` to compute the electrode layout - for all types of probes. 
-+ Update - parameterize run_CatGT step from parameters retrieved from - `ClusteringParamSet` table -+ Update - clustering step, update duration for "median_subtraction" step -+ Bugfix - handles single probe recording in "Neuropix-PXI" format -+ Update - safeguard in creating/inserting probe types upon probe activation -+ Add - quality control metric dashboard -+ Update & fix docstrings -+ Update - `ephys_report.UnitLevelReport` to add `ephys.ClusterQualityLabel` as a - foreign key reference -+ Add - `.pre-commit-config.yaml` +* setup pytest fixture ([`92026a6`](https://github.com/datajoint/element-array-ephys/commit/92026a614ae61b874d4a4692acc5fc0ad06bd560)) -## [0.2.0] - 2022-10-28 +* Merge pull request #183 from MilagrosMarin/main -+ Add - New schema `ephys_report` to compute and store figures from results -+ Add - Widget to display figures -+ Add - Add `ephys_no_curation` and routines to trigger spike-sorting analysis - using Kilosort (2.0, 2.5) -+ Add - mkdocs for Element Documentation -+ Add - New `QualityMetrics` table to store clusters' and waveforms' metrics after the - spike sorting analysis. +Minor change: remove pypi release from `release.yaml` ([`cc36465`](https://github.com/datajoint/element-array-ephys/commit/cc36465dc56a9e299a86e54329e806899c6bcf73)) -## [0.1.4] - 2022-07-11 +* update version and changelog ([`5cfc269`](https://github.com/datajoint/element-array-ephys/commit/5cfc26921633e89df5fb16637dd61b88361d73d7)) -+ Bugfix - Handle case where `spike_depths` data is present. +* remove PyPi release ([`dc7863e`](https://github.com/datajoint/element-array-ephys/commit/dc7863edde2431114db8992cb419782b28eaa3ce)) -## [0.1.3] - 2022-06-16 +* Merge pull request #182 from datajoint/staging -+ Update - Allow for the `precluster_output_dir` attribute to be nullable when no - pre-clustering is performed. 
+fix(probe_geometry): bugfix in x_coords for probe with staggered electrode positions ([`95d25f2`](https://github.com/datajoint/element-array-ephys/commit/95d25f2f76e3e8435eb3c8b199437df581aa3916)) -## [0.1.2] - 2022-06-09 +* Merge pull request #181 from ttngu207/main -+ Bugfix - Handle case where `pc_features.npy` does not exist. +fix(probe_geometry): bugfix in x_coords for probe with staggered electrode positions ([`d65b70f`](https://github.com/datajoint/element-array-ephys/commit/d65b70fd56c89d851b8c29819fe70a219cb81838)) -## [0.1.1] - 2022-06-01 +* update(version): update CHANGELOG and version ([`2e79f3d`](https://github.com/datajoint/element-array-ephys/commit/2e79f3d984272ce97709e7c83bb153ab6a2a452a)) -+ Add - Secondary attributes to `PreClusterParamSteps` table +## v0.3.1 (2024-01-04) -## [0.1.0] - 2022-05-26 +### Unknown -+ Update - Rename module for acute probe insertions from `ephys.py` to `ephys_acute.py`. -+ Add - Module for pre-clustering steps (`ephys_precluster.py`), which is built off of - `ephys_acute.py`. -+ Add - Module for chronic probe insertions (`ephys_chronic.py`). -+ Bugfix - Missing `fileTimeSecs` key in SpikeGLX meta file. -+ Update - Move common functions to `element-interface` package. 
-+ Add - NWB export function +* Merge pull request #178 from MilagrosMarin/main -## [0.1.0b4] - 2021-11-29 +Update CHANGELOG and `version.py` ([`6184b2f`](https://github.com/datajoint/element-array-ephys/commit/6184b2fa51db9e430967bbb618b15c0b65549613)) -+ Add - Processing with Kilosort and pyKilosort for Open Ephys and SpikeGLX +* CHANGELOG and `version.py` updated ([`bfc0c0a`](https://github.com/datajoint/element-array-ephys/commit/bfc0c0a7e31903cf2f74d5675f06a896e4705769)) -## [0.1.0b0] - 2021-05-07 +* Merge pull request #176 from kushalbakshi/main -+ Update - First beta release +Update setup dependencies + tutorial setup + fix diagram ([`0174478`](https://github.com/datajoint/element-array-ephys/commit/01744781b27ee4cad16c18bb2e6a1fea175e038c)) -## [0.1.0a5] - 2021-05-05 +* Minor fixes and updates to notebook ([`1ea7c89`](https://github.com/datajoint/element-array-ephys/commit/1ea7c89993465eaa34e02864f654c654b9e7285c)) -+ Add - GitHub Action release process -+ Add - `probe` and `ephys` elements -+ Add - Readers for: `SpikeGLX`, `Open Ephys`, `Kilosort` -+ Add - Probe table supporting: Neuropixels probes 1.0 - 3A, 1.0 - 3B, 2.0 - SS, - 2.0 - MS +* Fix typo in setup.py ([`b919ca3`](https://github.com/datajoint/element-array-ephys/commit/b919ca34432c3189d934a3a75bdb071fe8bcb6b9)) -[0.3.0]: https://github.com/datajoint/element-array-ephys/releases/tag/0.3.0 -[0.2.11]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.11 -[0.2.10]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.10 -[0.2.9]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.9 -[0.2.8]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.8 -[0.2.7]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.7 -[0.2.6]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.6 -[0.2.5]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.5 -[0.2.4]: 
https://github.com/datajoint/element-array-ephys/releases/tag/0.2.4 -[0.2.3]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.3 -[0.2.2]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.2 -[0.2.1]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.1 -[0.2.0]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.0 -[0.1.4]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.4 -[0.1.3]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.3 -[0.1.2]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.2 -[0.1.1]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.1 -[0.1.0]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0 -[0.1.0b4]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0b4 -[0.1.0b0]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0b0 -[0.1.0a5]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0a5 +* Black formatting ([`16d36d5`](https://github.com/datajoint/element-array-ephys/commit/16d36d56fad9e0e5b97af2fa57df065a664917cf)) + +* Move dj_config setup to `tutorial_pipeline.py` ([`0dbdde7`](https://github.com/datajoint/element-array-ephys/commit/0dbdde70a27054f5489399daca4743f40c34ce29)) + +* Remove PyPI versioning in setup ([`b979fec`](https://github.com/datajoint/element-array-ephys/commit/b979feca44468a33601af36c1db3f917993844df)) + +* Markdown structural edits ([`b2c4901`](https://github.com/datajoint/element-array-ephys/commit/b2c4901095c6d16b43302bbb4da43a0ad813bc6f)) + +* Run tutorial notebook ([`43ff0d4`](https://github.com/datajoint/element-array-ephys/commit/43ff0d42a1abe89f6bf3f3f334b32773ca0d2c37)) + +* Minor fixes to README ([`da2239d`](https://github.com/datajoint/element-array-ephys/commit/da2239dbc06dc018f78823c6c36a7f83ff48a5d4)) + +* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys 
([`9a8865f`](https://github.com/datajoint/element-array-ephys/commit/9a8865f74ec9d4c1e5b661d04bd3f54932ac53e4)) + +* Update path to flowchart diagram ([`f424a0f`](https://github.com/datajoint/element-array-ephys/commit/f424a0f7726e8dcfafe99b2f053af194d856a536)) + +* Fix heading styling ([`6323d4e`](https://github.com/datajoint/element-array-ephys/commit/6323d4ee7ac4cd07b494ce93cca8ac159c0bf843)) + +* Minor updates for formatting and structure ([`8717528`](https://github.com/datajoint/element-array-ephys/commit/87175289eb3a0aa1d401bb41f5b9f7e73c62659a)) + +* Added diagram_flowchart.svg ([`bf63fe1`](https://github.com/datajoint/element-array-ephys/commit/bf63fe1b6887075a123de99a5b00fd20f9ee9561)) + +* Added diagram_flowchart.svg ([`4a4104c`](https://github.com/datajoint/element-array-ephys/commit/4a4104c92a370162a4cb51642d337b3567ef28fe)) + +* Merge branch 'datajoint:main' into codespace ([`b66ccee`](https://github.com/datajoint/element-array-ephys/commit/b66cceeda3305c4af75befaaee9e91aa2704bb19)) + +* Update diagram_flowchart.drawio ([`4f58c68`](https://github.com/datajoint/element-array-ephys/commit/4f58c68562820c583ac04fccd8a394e729adafdb)) + +* Merge pull request #175 from A-Baji/main + +revert: :memo: revert docs dark mode cell text color ([`d24f936`](https://github.com/datajoint/element-array-ephys/commit/d24f936b15cfbb138a4490d3cec3fbd1a5a84e69)) + +* revert: :memo: revert table style ([`62172c4`](https://github.com/datajoint/element-array-ephys/commit/62172c459c643e66b559c7c4af1943901a890a89)) + +* Merge pull request #174 from MilagrosMarin/update_tutorial + +Improvements in `element-array-ephys` tutorial and README ([`fe4a844`](https://github.com/datajoint/element-array-ephys/commit/fe4a8444ef213fd70625053ec776d6081ac695c6)) + +* tutorial run with included outputs complete ([`2a59ea0`](https://github.com/datajoint/element-array-ephys/commit/2a59ea0e19ceb11ae43892f7309a119a3ffdfa57)) + +* Tutorial run with included outputs 
([`40eec3e`](https://github.com/datajoint/element-array-ephys/commit/40eec3e8441998970e16ebb34f375f3e6647fd8d)) + +* Revert deleting `SessionDirectory` insertion ([`ad69298`](https://github.com/datajoint/element-array-ephys/commit/ad692986c3ba73f0885f401e8114bd66e17c4826)) + +* add markdown in setup ([`abc82ba`](https://github.com/datajoint/element-array-ephys/commit/abc82ba215d67912482981fe2b2766e7a4bccff8)) + +* ephys tutorial preliminary review to mirror dlc ([`9e16a23`](https://github.com/datajoint/element-array-ephys/commit/9e16a23ff1c8ab7002a013ab4bf4057cd9902253)) + +* Merge pull request #173 from kushalbakshi/codespace + +Add DevContainers + Codespaces tutorial ([`140384e`](https://github.com/datajoint/element-array-ephys/commit/140384ee293a366d03900e490fe03413b7d8531b)) + +* review PR tutorial ([`733d2b1`](https://github.com/datajoint/element-array-ephys/commit/733d2b1ee9702a6d4e391b6ce62e373534ffdd0a)) + +* Fix typo in tutorial heading ([`820b282`](https://github.com/datajoint/element-array-ephys/commit/820b282e29eab4793c56ab99ac29ac897f3bdd33)) + +* Merge branch 'codespace' of https://github.com/kushalbakshi/element-array-ephys into codespace ([`a993b8d`](https://github.com/datajoint/element-array-ephys/commit/a993b8d99a964c1cfb599bc7411f1ff27a0c7c9b)) + +* Updated diagram_flowchart.svg ([`a376c90`](https://github.com/datajoint/element-array-ephys/commit/a376c90a6f495f84af9611b8e8293272892c3c29)) + +* Fix typo ([`d1657b2`](https://github.com/datajoint/element-array-ephys/commit/d1657b2c57c1d8733cb88bce326511679a575fe3)) + +* Update README + minor fixes ([`b9fd4a3`](https://github.com/datajoint/element-array-ephys/commit/b9fd4a35f19587ea6527d968bb6b42e5afa880b2)) + +* Update diagram_flowchart.drawio ([`a08736c`](https://github.com/datajoint/element-array-ephys/commit/a08736cd58b2c43bdeb6e9d0d35b69ce818174f8)) + +* Update diagram_flowchart.svg ([`f9fc3ec`](https://github.com/datajoint/element-array-ephys/commit/f9fc3ec002160fb0bfe36c579019637bd6ab285e)) + 
+* Updated diagram_flowchart.svg ([`bb2f507`](https://github.com/datajoint/element-array-ephys/commit/bb2f507704b7b15bbe0f887dffee0c98018828e3)) + +* Update diagram_flowchart.drawio ([`6328398`](https://github.com/datajoint/element-array-ephys/commit/632839825214c6c66baa29cd7136bc0bc46f0f3a)) + +* Complete demo notebooks ([`21fde13`](https://github.com/datajoint/element-array-ephys/commit/21fde1351084c6f73751dd47f0024c9b9e6487ad)) + +* Black formatting ([`5d57ff2`](https://github.com/datajoint/element-array-ephys/commit/5d57ff2c1d12963391b3318d091a0ac1ca66db6d)) + +* Update demo presentation notebook ([`1da15db`](https://github.com/datajoint/element-array-ephys/commit/1da15dbd1bc9d759fc2c3cf1dd6e1388f5645fad)) + +* Add demo notebooks ([`d02d8a5`](https://github.com/datajoint/element-array-ephys/commit/d02d8a585b575ea60b01caae953df4176a40de01)) + +* Completed tutorial ([`7bf9f9f`](https://github.com/datajoint/element-array-ephys/commit/7bf9f9f3cf80f32963ed421221a4cd405aef6dd8)) + +* Update root_data_dir in Dockerfile ([`d5430aa`](https://github.com/datajoint/element-array-ephys/commit/d5430aa93b507baf4923acda3d3eb8663e480a23)) + +* Update Dockerfile and tutorial_pipeline to fix errors ([`1717054`](https://github.com/datajoint/element-array-ephys/commit/1717054c4eefb1176be2a54bc9e75b80508b62a0)) + +* Use `session_with_datetime` for tutorial ([`ce6e3bf`](https://github.com/datajoint/element-array-ephys/commit/ce6e3bf8ee3968f5b1d1b97e1d7d238272b6c073)) + +* Update `get_logger` to `dj.logger` ([`b2180c4`](https://github.com/datajoint/element-array-ephys/commit/b2180c457e86303ac816bd0acb94c99fb1097821)) + +* Markdown improvements in tutorial ([`38c50fb`](https://github.com/datajoint/element-array-ephys/commit/38c50fbaad8cc2a3d3d717ed4c2d5a577fa908e9)) + +* Update tutorial markdown ([`4190925`](https://github.com/datajoint/element-array-ephys/commit/41909257e34b37c9197943dc75d855c31f9cda89)) + +* Merge branch 'codespace' of 
https://github.com/kushalbakshi/element-array-ephys into codespace ([`69cef22`](https://github.com/datajoint/element-array-ephys/commit/69cef2204070e258e40e7ef43ba65200be3d560f)) + +* Update `.gitignore` to include Codespaces ([`f5ab71d`](https://github.com/datajoint/element-array-ephys/commit/f5ab71d8abfcfe973d9792e91307ed705d56f54b)) + +* Update root data dir env ([`1bea230`](https://github.com/datajoint/element-array-ephys/commit/1bea230d0789be6632c8dbb78139d9a2b8f92421)) + +* Add tutorial notebook ([`caf5c91`](https://github.com/datajoint/element-array-ephys/commit/caf5c9109d43e373c262d3757c0bc3edd54d416f)) + +* Allow build step in docker-compose ([`dcd768a`](https://github.com/datajoint/element-array-ephys/commit/dcd768a0e7bc799b9da968ed8831738d1facbee1)) + +* Black formatting ([`6c6afe4`](https://github.com/datajoint/element-array-ephys/commit/6c6afe4778466b26dbf846c84d1d77daf8672ca7)) + +* Enable devcontainer builds in CICD ([`5e2d7be`](https://github.com/datajoint/element-array-ephys/commit/5e2d7bef950f70007dc418c9975c22b3488c95a1)) + +* First commit for codespace compatibility ([`5f756c3`](https://github.com/datajoint/element-array-ephys/commit/5f756c36675e5191f2b19d2581dcf1a4b0991729)) + +* Merge pull request #169 from ttngu207/new_spikeglx_and_probeinterface + +New spikeglx and probeinterface ([`3b8efe5`](https://github.com/datajoint/element-array-ephys/commit/3b8efe52fcc16eae13d918a11dc5c1e89378c93e)) + +* address PR comments + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`f42f1fc`](https://github.com/datajoint/element-array-ephys/commit/f42f1fcff03f0c312d8e09ea50828bf2a77b33b5)) + +* address PR comments + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`452ff31`](https://github.com/datajoint/element-array-ephys/commit/452ff31c952f641f329771c22c39a4e6845d7588)) + +* bugfix ([`4407678`](https://github.com/datajoint/element-array-ephys/commit/44076782dcb86af3309fe2bda909d971f9819266)) + +* bugfix in geomap to shank conversion 
([`1514613`](https://github.com/datajoint/element-array-ephys/commit/1514613fb9b74c5be628f2d5e53882ea6f4e7da1)) + +* transform geom map to shank map ([`9857aef`](https://github.com/datajoint/element-array-ephys/commit/9857aef544ebabc84e4906b421420bc2407b55a6)) + +* update spikeglx loader to handle spikeglx ver 20230202 ([`3994fc7`](https://github.com/datajoint/element-array-ephys/commit/3994fc75b10f6d5a92e6c7b664067641716d518e)) + +* incorporate probeinterface and probe geometry for all npx probes ([`224b1c7`](https://github.com/datajoint/element-array-ephys/commit/224b1c7049c9e246df22fe3a46fbd357d3096d8b)) + +## v0.2.11 (2023-06-30) + +### Unknown + +* Merge pull request #165 from kabilar/patch + +Patch issue with nullable attributes ([`e4dd98a`](https://github.com/datajoint/element-array-ephys/commit/e4dd98a3541271e041538daa053f158c1b9f8c63)) + +* Temporarily remove Docker image builds ([`48a1e76`](https://github.com/datajoint/element-array-ephys/commit/48a1e768ad6b8cf05bf519cdcbf0e503aa73e613)) + +* Format with black ([`d5de596`](https://github.com/datajoint/element-array-ephys/commit/d5de59661c21992dbc9104f6ce8ca9c26e64cc91)) + +* Update image path ([`2557877`](https://github.com/datajoint/element-array-ephys/commit/25578773db6a478c54486ebcfc9010d7c23fa87e)) + +* Update text ([`8a764e8`](https://github.com/datajoint/element-array-ephys/commit/8a764e85f0645fb38d2d74f87fdfb73260bb2524)) + +* Update readme ([`c530671`](https://github.com/datajoint/element-array-ephys/commit/c5306715508891428e69203172091664c6d34c7a)) + +* Update changelog ([`d1cf13f`](https://github.com/datajoint/element-array-ephys/commit/d1cf13f8595c0fe6d90a0dd029a65b73d8ecec4a)) + +* Update GitHub Actions ([`71bb8e2`](https://github.com/datajoint/element-array-ephys/commit/71bb8e2a489d044a01c4328027630c5f8f34b6cf)) + +* Update version and changelog ([`d4f7fe0`](https://github.com/datajoint/element-array-ephys/commit/d4f7fe080eb5fe94f518c2db5b17ffef4448dee2)) + +* Add default value 
([`01ad1e8`](https://github.com/datajoint/element-array-ephys/commit/01ad1e85a3e2db48c198b2d0d32096152ffba295)) + +* Merge pull request #142 from ttngu207/main + +Update kilosort_triggering.py ([`1d30cb8`](https://github.com/datajoint/element-array-ephys/commit/1d30cb81c258d396aba16a38ed20fbfb0e55a052)) + +* update CHANGELOG ([`5e1f055`](https://github.com/datajoint/element-array-ephys/commit/5e1f0555349a11b5b51498d13e209a28781c1b11)) + +* Merge branch 'datajoint:main' into main ([`c5f20b0`](https://github.com/datajoint/element-array-ephys/commit/c5f20b0063363e4946ee059838db34c0dc3c57ac)) + +## v0.2.10 (2023-05-26) + +### Unknown + +* Merge pull request #151 from kabilar/main + +Fix readability of tables in dark mode ([`47dea95`](https://github.com/datajoint/element-array-ephys/commit/47dea95466cb771f19807c8e0499efc5e3f2f577)) + +* Update citation ([`100913e`](https://github.com/datajoint/element-array-ephys/commit/100913e772c64bc482fde088842e184972ba479f)) + +* Update changelog ([`0bfca62`](https://github.com/datajoint/element-array-ephys/commit/0bfca629a963196470f4dc291ba1678b58a2829c)) + +* Update CSS ([`15e9ddb`](https://github.com/datajoint/element-array-ephys/commit/15e9ddb4e8fe21f078b623c5eb64131f6255d82a)) + +* Merge pull request #150 from kabilar/main + +Add Kilosort, NWB, and DANDI citations ([`ad9588f`](https://github.com/datajoint/element-array-ephys/commit/ad9588fb1c1293d1b8e598f573b280f06ba4e750)) + +* Add NWB and DANDI citations ([`af81ef9`](https://github.com/datajoint/element-array-ephys/commit/af81ef973859a0eea890d3c6ff513640a9255e16)) + +* Update citation page ([`aee35f7`](https://github.com/datajoint/element-array-ephys/commit/aee35f7918d4d9c66fa97d8e1795b66f43e58996)) + +* Update changelog ([`0ca91fa`](https://github.com/datajoint/element-array-ephys/commit/0ca91fa11ebf11b7dcbe600c628f63fc8c32078c)) + +* Update changelog ([`f89eae4`](https://github.com/datajoint/element-array-ephys/commit/f89eae42128c2ca3f068aecf05d04ca214f40282)) + +* Add 
plugin ([`4436b05`](https://github.com/datajoint/element-array-ephys/commit/4436b056d3408af20d448068f5ed6d27b8486465)) + +* Remove redirects ([`c798564`](https://github.com/datajoint/element-array-ephys/commit/c798564673444260e2a1094bf2cfd615203283da)) + +* Update changelog ([`b63031c`](https://github.com/datajoint/element-array-ephys/commit/b63031cf788fe075b410cc23f15d8efc90819895)) + +* Add citation ([`69e76dd`](https://github.com/datajoint/element-array-ephys/commit/69e76dd58e9785f3d4f93e78c3c7b86006e1eae4)) + +* Merge pull request #149 from kushalbakshi/main + +Fix notebook output in dark mode ([`97a57b1`](https://github.com/datajoint/element-array-ephys/commit/97a57b158b116cf8d0b6e84ecc1fe737f3176366)) + +## v0.2.9 (2023-05-11) + +### Unknown + +* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`e4809ba`](https://github.com/datajoint/element-array-ephys/commit/e4809ba249f3885ff578123eba6e479ad672a9f0)) + +* Merge pull request #148 from kushalbakshi/main + +Fix docs tutorials in dark mode ([`96d1187`](https://github.com/datajoint/element-array-ephys/commit/96d118777d5a0f736ea2ca224eb90526d7637616)) + +* Dark mode notebooks fix ([`9aab33d`](https://github.com/datajoint/element-array-ephys/commit/9aab33da951bcddb8515f91ade88aec783631113)) + +## v0.2.8 (2023-04-28) + +### Unknown + +* Fix docs tutorials in dark mode ([`d2367ce`](https://github.com/datajoint/element-array-ephys/commit/d2367ce4e430273ada830d9c6f4eefdde66c2637)) + +* Merge pull request #146 from JaerongA/metrics + +Remap `metrics.csv` column names ([`6aef807`](https://github.com/datajoint/element-array-ephys/commit/6aef8074a16af945c4c6d928bf480daa6e4d1401)) + +## v0.2.7 (2023-04-19) + +### Unknown + +* update changelog and version ([`6b069e6`](https://github.com/datajoint/element-array-ephys/commit/6b069e68efbe933c1324a7afbd10276732c3b49e)) + +* add column name mapping for metrics.csv 
([`c97d509`](https://github.com/datajoint/element-array-ephys/commit/c97d5090c445ed6c4f8595cf64dfb59eb965545e)) + +## v0.2.6 (2023-04-18) + +### Unknown + +* Merge pull request #143 from kabilar/main + +Update version and changelog for release ([`5abecc3`](https://github.com/datajoint/element-array-ephys/commit/5abecc3ccf0a3e72e5807e67a1a11d875953450e)) + +* Update `ephys_precluster` ([`5993d6e`](https://github.com/datajoint/element-array-ephys/commit/5993d6eeb1e3ab57d1cca96a47e32440170c0477)) + +* Update version and changelog ([`e9b66af`](https://github.com/datajoint/element-array-ephys/commit/e9b66aff50f78715420518ec5470ec1e1435abaf)) + +* Merge `main` of datajoint/element-array-ephys ([`40b5a6d`](https://github.com/datajoint/element-array-ephys/commit/40b5a6df8c884ee36615d3b20fd2f838ac405062)) + +* Merge pull request #144 from JaerongA/main ([`e487f3a`](https://github.com/datajoint/element-array-ephys/commit/e487f3a2083f3c25bbe160eba1ac59d4707e3793)) + +* lowercase all column names in metrics.csv ([`f35ba0b`](https://github.com/datajoint/element-array-ephys/commit/f35ba0b383efb8d59448eb6220a6a4dab153f41d)) + +* Merge pull request #138 from JaerongA/main + +Update docs for quality metrics ([`aabc454`](https://github.com/datajoint/element-array-ephys/commit/aabc45420eaead26966309ecedbfec513e89a771)) + +## v0.2.5 (2023-04-13) + +### Unknown + +* remove schema tag in SkullReference ([`7192958`](https://github.com/datajoint/element-array-ephys/commit/7192958dcb188f6f72c363eefa333786acfd0216)) + +* add a new tag ([`b8ef2d9`](https://github.com/datajoint/element-array-ephys/commit/b8ef2d9d2069cce2e0641ea63dd461b60634a39b)) + +* update schema diagrams to show SkullReference ([`8ffe6df`](https://github.com/datajoint/element-array-ephys/commit/8ffe6dfb9dbe9ee1b421cb05472daf07c1a1428e)) + +* Update CHANGELOG.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> 
([`166c00a`](https://github.com/datajoint/element-array-ephys/commit/166c00a20a48805cbc3fe8c3dc300b7c9bd8a7ae)) + +* Update docs/src/concepts.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`4a304d8`](https://github.com/datajoint/element-array-ephys/commit/4a304d88d77d0f7395916881c0110019ce72d803)) + +* bump version ([`7ceae8b`](https://github.com/datajoint/element-array-ephys/commit/7ceae8bac60409d50fd105c9ccd13177ff5f4339)) + +* add schema diagrams ([`eb5b0b1`](https://github.com/datajoint/element-array-ephys/commit/eb5b0b10b7efc98c2aa47ebd8c709758bfa6bfea)) + +* add quality_metrics.ipynb to mkdocs ([`977b90a`](https://github.com/datajoint/element-array-ephys/commit/977b90a80dec5034aa78eb90f9810df8b0ff942b)) + +* Update docs/src/concepts.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`27d5742`](https://github.com/datajoint/element-array-ephys/commit/27d57420e6ce7107c1b3b73acb5ac436c0155a4a)) + +* Update element_array_ephys/ephys_report.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`1e37791`](https://github.com/datajoint/element-array-ephys/commit/1e3779182767b699faac78e952a1e4e64f4e2854)) + +* Update requirements.txt + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`46b911b`](https://github.com/datajoint/element-array-ephys/commit/46b911b6df8a49551e5042b5d422135b4585ffc7)) + +* add nbformat to requirements.txt ([`2f38cfa`](https://github.com/datajoint/element-array-ephys/commit/2f38cfa099ab62561c6dbbfdbd6e5ea233de233d)) + +* update requirements.txt ([`eb0d795`](https://github.com/datajoint/element-array-ephys/commit/eb0d7955f53879c511439b77d1f5561a36049e85)) + +* update concepts.md ([`d362792`](https://github.com/datajoint/element-array-ephys/commit/d362792891e9e4c1e3b5f604d070d32cb8e24a23)) + +* fix docstring error in qc.py ([`ea03674`](https://github.com/datajoint/element-array-ephys/commit/ea036742dc28421bad116e897055b867eff6c54a)) + +* add docstring for qc metric tables 
([`7a89a7f`](https://github.com/datajoint/element-array-ephys/commit/7a89a7fc5819fa205bf332155f684d9b94529133)) + +* fix docstring error ([`9cc4545`](https://github.com/datajoint/element-array-ephys/commit/9cc4545681ec3da7d0a1b1230c06e12fbdd2e409)) + +* Fix typo ([`e0919ae`](https://github.com/datajoint/element-array-ephys/commit/e0919ae14670b1cce1341d216afe08c990fa1285)) + +* Update docs configuration ([`2dcca99`](https://github.com/datajoint/element-array-ephys/commit/2dcca99e0ed3d4e1d7ea14b44686f8d6a7f0bd5e)) + +* Fix for `PT_ratio` to `pt_ratio` ([`e4358a5`](https://github.com/datajoint/element-array-ephys/commit/e4358a50c152d940c663b691b74342570f518c30)) + +* Update kilosort_triggering.py ([`8035648`](https://github.com/datajoint/element-array-ephys/commit/8035648551e6545cd66f2f4d5911241226430302)) + +## v0.2.4 (2023-03-10) + +### Unknown + +* Merge pull request #137 from kabilar/main + +Update requirements for plotting widget ([`2fa46bd`](https://github.com/datajoint/element-array-ephys/commit/2fa46bd690c6bf4a22494dc2086c506f5ba48bf8)) + +* Update changelog ([`2eb359f`](https://github.com/datajoint/element-array-ephys/commit/2eb359f354b55235ae350dc89cf4f918449764c2)) + +* Update changelog ([`ac581b9`](https://github.com/datajoint/element-array-ephys/commit/ac581b9eafdb2848aecb2691509038ed9a9c9c13)) + +* Add dependency ([`00f4f6d`](https://github.com/datajoint/element-array-ephys/commit/00f4f6d9693e2c470b46e55341ee54f5c2f36df2)) + +* Add ipywidgets as dependency ([`6840069`](https://github.com/datajoint/element-array-ephys/commit/684006910b979b70fa7c9d9a5614a570da1159d2)) + +## v0.2.3 (2023-02-14) + +### Fix + +* fix: :bug: import from __future__ module + +this supports backward compability with typing ([`4d9ab28`](https://github.com/datajoint/element-array-ephys/commit/4d9ab28609b161be986cda2872778d7a403277f3)) + +### Unknown + +* Merge pull request #132 from CBroz1/main + +Add 0.2.3 release date 
([`e39f9d3`](https://github.com/datajoint/element-array-ephys/commit/e39f9d30e23052f4d1bbf85bc7ca459d80e641c2)) + +* Add 0.2.3 release date ([`eaeef30`](https://github.com/datajoint/element-array-ephys/commit/eaeef306827799448050f0e96fa7d2c03ece750b)) + +* Merge pull request #125 from CBroz1/main + +Adjusting pyopenephys requirement for pypi publication ([`c7f92af`](https://github.com/datajoint/element-array-ephys/commit/c7f92af01da6e975bb25e63a472825e8491f1057)) + +* New pyopenephys version ([`f07bd44`](https://github.com/datajoint/element-array-ephys/commit/f07bd44abcaa4ac88cded20168b4e8f63b451563)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`905588f`](https://github.com/datajoint/element-array-ephys/commit/905588f5a3f03ae813a4ff25e202aa69b9e47ec7)) + +* Merge pull request #128 from ttngu207/main + +fix removing outdated files/directory ([`53f0312`](https://github.com/datajoint/element-array-ephys/commit/53f0312272ffbf7989c40b2ba91c9e7a33a81faa)) + +* Update kilosort_triggering.py ([`a34e437`](https://github.com/datajoint/element-array-ephys/commit/a34e437380868498975735e97561bb406a0cc70f)) + +* fix removing outdated files/directory ([`6fb65b3`](https://github.com/datajoint/element-array-ephys/commit/6fb65b376c25bd35dd4e665519c20e3ce8f33f4d)) + +* minor improvement ([`7c6dc37`](https://github.com/datajoint/element-array-ephys/commit/7c6dc374184a90e3211cef63bf0a4c31dd7a35fc)) + +* Merge pull request #126 from sidhulyalkar/main + +Fix multiple hash kilosort output issue ([`b619bd0`](https://github.com/datajoint/element-array-ephys/commit/b619bd05d77246808d61627eacaa4e6cad4aa69a)) + +* edit comment ([`f17f299`](https://github.com/datajoint/element-array-ephys/commit/f17f299f8a379b2e6de67c27c51e518d692f0f56)) + +* Fix issue where hash is changed(different paramset) and trying to rerun processing ([`7774492`](https://github.com/datajoint/element-array-ephys/commit/7774492e21fd361560393e5d2bde906adb256e37)) + +* Merge branch 'main' 
of https://github.com/sidhulyalkar/element-array-ephys ([`152cc58`](https://github.com/datajoint/element-array-ephys/commit/152cc586e294de185aa799e50af9717b4b6948bf)) + +* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`2df6280`](https://github.com/datajoint/element-array-ephys/commit/2df6280b09c36ace534b11726220140afc6d2431)) + +* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`06568f4`](https://github.com/datajoint/element-array-ephys/commit/06568f445a8bc8da1b0c34eb457899626de75dba)) + +* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`e7f6060`](https://github.com/datajoint/element-array-ephys/commit/e7f6060467f02510558e7b86e9e664c7cbdbc38d)) + +* Merge branch 'run_kilosort' of https://github.com/sidhulyalkar/element-array-ephys ([`4e195c3`](https://github.com/datajoint/element-array-ephys/commit/4e195c3b173cfdb94371311c1d6be6babad7b75c)) + +* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`f5ca7e8`](https://github.com/datajoint/element-array-ephys/commit/f5ca7e87ad8bc51c0801c7b2504e0fb8092a3a08)) + +* Added Code of Conduct ([`195c61e`](https://github.com/datajoint/element-array-ephys/commit/195c61e8e825d90701b84b1c18fa204fa56c8bc3)) + +* Simplify import ([`47f6a07`](https://github.com/datajoint/element-array-ephys/commit/47f6a07ad030dff9997272a4f74abe7962593277)) + +* pyopenephys import workaround ([`2a742e6`](https://github.com/datajoint/element-array-ephys/commit/2a742e694326d2937a07587469730110f1f11b39)) + +* Merge pull request #124 from CBroz1/main + +Cleanup docstrings, add notebook render ([`d5b9586`](https://github.com/datajoint/element-array-ephys/commit/d5b95860977485e1020500d72f2bf18576a18aad)) + +* Apply suggestions from code review + +Co-authored-by: JaerongA <jaerong.ahn@datajoint.com> ([`4796056`](https://github.com/datajoint/element-array-ephys/commit/4796056c1bdb98039cc52e43a4491dc43f6bcfef)) + +* Merge pull request #3 from 
JaerongA/main + +fix: :bug: import from __future__ to support backward compatibility ([`fd94939`](https://github.com/datajoint/element-array-ephys/commit/fd94939eb518e8727706f3dff56ebc35bb3fcb5f)) + +* Merge branch 'main' into main ([`084ada2`](https://github.com/datajoint/element-array-ephys/commit/084ada258f9935f6d3636fd31c1a962b3be0a9aa)) + +* Adjust dependencies 2 ([`a28cf13`](https://github.com/datajoint/element-array-ephys/commit/a28cf13118f15fbec515171852027965b8c433ad)) + +* Adjust dependencies ([`45f846c`](https://github.com/datajoint/element-array-ephys/commit/45f846cf75004b9e5ea6e6580f6f16209515c6f0)) + +* Fix typing bug ([`888e7f7`](https://github.com/datajoint/element-array-ephys/commit/888e7f743fcfc10dc190c3020523aeb1547c8380)) + +* interface requirement to pip installable ([`9ff2e04`](https://github.com/datajoint/element-array-ephys/commit/9ff2e04cb879fe8aae0fe985a661f7fe8761f79b)) + +* add extras_require nwb install option to docs ([`8045879`](https://github.com/datajoint/element-array-ephys/commit/80458796ebda66e81312211677a0f4aa93100295)) + +* Version bump, changelog ([`9d03350`](https://github.com/datajoint/element-array-ephys/commit/9d0335047fe67e02ebf5ccc6da395d5bdbeda3df)) + +* Add extras_require for dev and nwb ([`e01683c`](https://github.com/datajoint/element-array-ephys/commit/e01683ca2241e457ccfa6f4d61914808400d663e)) + +* Spelling ([`56eb68a`](https://github.com/datajoint/element-array-ephys/commit/56eb68a96f1552aaeed967d4c46ccad45c8eabcd)) + +* Fix docstrings ([`0980242`](https://github.com/datajoint/element-array-ephys/commit/0980242d1502d09612b424ce7b9f06a250d11342)) + +* Add tutorial notebook renders ([`96bb6fa`](https://github.com/datajoint/element-array-ephys/commit/96bb6fa10207782e2d5249eda76007ebc453567d)) + +* More spellcheck; Markdown linting ([`64e7dc6`](https://github.com/datajoint/element-array-ephys/commit/64e7dc690c7cbf1f4051d4cbd3e5d29f3bfd9218)) + +* Update License 2023 
([`4ef0b6d`](https://github.com/datajoint/element-array-ephys/commit/4ef0b6db9a27fd7ee68fcc48744ee98981947156)) + +* Spellcheck pass ([`ea980e9`](https://github.com/datajoint/element-array-ephys/commit/ea980e9d35a7582ff1953651812e61a736931c9d)) + +* Remove unused import ([`b3c0786`](https://github.com/datajoint/element-array-ephys/commit/b3c0786329d6c60c00e092aa0f3a10e920970c20)) + +* blackify ([`0e5a1c6`](https://github.com/datajoint/element-array-ephys/commit/0e5a1c64d8e7913cdb70ef701c5e53a83225fbcf)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`b250f2d`](https://github.com/datajoint/element-array-ephys/commit/b250f2dd15395876196029789667250cf331a6ee)) + +* Merge branch 'main' of https://github.com/JaerongA/element-array-ephys ([`147550c`](https://github.com/datajoint/element-array-ephys/commit/147550c6fee8a6d510d877be627025f0c710aba8)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`06db0f8`](https://github.com/datajoint/element-array-ephys/commit/06db0f84f2b5a686a3fe87ef14d3be196e7861b3)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`0ec5e3d`](https://github.com/datajoint/element-array-ephys/commit/0ec5e3d135f62013083ce2799db927e23ad1d10e)) + +* Add nwb.py from run_kilosort branch ([`31e46cd`](https://github.com/datajoint/element-array-ephys/commit/31e46cd30f0467f43aca18ca450ec72667656417)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`5711a12`](https://github.com/datajoint/element-array-ephys/commit/5711a12ac2f2ed90abf8a27b9120c017c339090a)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`a8e93d1`](https://github.com/datajoint/element-array-ephys/commit/a8e93d162b1d70f0eb704973141198f750e52f76)) + +## v0.2.2 (2023-01-11) + +### Unknown + +* Merge pull request #122 from JaerongA/main + +Revert import order 
([`eababbc`](https://github.com/datajoint/element-array-ephys/commit/eababbc4bb02ecc137641a19163dea8a5c8b6785)) + +* add back deleted version tags ([`bd0e76a`](https://github.com/datajoint/element-array-ephys/commit/bd0e76a685cb143bc631bc03a64df45123d65138)) + +* merge upstream & resolve conflicts ([`1feff92`](https://github.com/datajoint/element-array-ephys/commit/1feff92b61fb473a2c1464ed130fa2fa1b7f58df)) + +* Merge pull request #120 from kushalbakshi/main + +Docstring changes for docs API ([`623a381`](https://github.com/datajoint/element-array-ephys/commit/623a38112e6d7404421eece243de1d385faeb663)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`523f2e9`](https://github.com/datajoint/element-array-ephys/commit/523f2e97d3245f60c49f75496beae76d92bc074d)) + +* Update CHANGELOG to resolve merge conflicts ([`e613fc4`](https://github.com/datajoint/element-array-ephys/commit/e613fc45d34c83726d4bf032cb61c402a574c065)) + +* Docstring changes for docs API ([`87a7849`](https://github.com/datajoint/element-array-ephys/commit/87a7849a021ec4076624836a2b5d288448ada909)) + +* update CHANGELOG.md ([`5c7a772`](https://github.com/datajoint/element-array-ephys/commit/5c7a7722b46196a25e3cb85603a3487113dd6592)) + +* revert import order in __init__.py ([`956b96e`](https://github.com/datajoint/element-array-ephys/commit/956b96ec563b17912efe14d0764d6fedfe49f8a5)) + +* Add E402 in pre-commit-config ([`f4f283a`](https://github.com/datajoint/element-array-ephys/commit/f4f283a4209492e55dc82648434ecba55e05acfd)) + +## v0.2.1 (2023-01-09) + +### Chore + +* chore: :loud_sound: update CHANGELOG ([`2d43321`](https://github.com/datajoint/element-array-ephys/commit/2d4332189655eeffff7fd41fa071adcc2754ff16)) + +* chore: :rewind: revert formatting in concepts.md ([`c16b6bd`](https://github.com/datajoint/element-array-ephys/commit/c16b6bdcbadb97fae2b0c21d3dc8c13308fd4131)) + +* chore(deps): :pushpin: unpin plotly 
([`8504b97`](https://github.com/datajoint/element-array-ephys/commit/8504b9724a129c617a8264cbbeb1c26c8a696d8e)) + +### Documentation + +* docs: :memo: add | update docstrings ([`4999d64`](https://github.com/datajoint/element-array-ephys/commit/4999d64980e4cf278f872159c7d327387939ee12)) + +* docs: :memo: name change & add docstring ([`d9c75c8`](https://github.com/datajoint/element-array-ephys/commit/d9c75c8ea425eb38dc8e714639ad341edf39cafd)) + +### Refactor + +* refactor: :pencil2: fix typos ([`efca82e`](https://github.com/datajoint/element-array-ephys/commit/efca82e352adee21c6979e94078c1c82b8b423aa)) + +* refactor(deps): :heavy_minus_sign: remove ibllib deps and add acorr func ([`c613164`](https://github.com/datajoint/element-array-ephys/commit/c613164ae90cac220c1726a9eb2e12f336f876db)) + +### Unknown + +* Merge pull request #116 from JaerongA/ephys_test + +modify build_electrodes function ([`0f518f1`](https://github.com/datajoint/element-array-ephys/commit/0f518f1b0cd60ffeb22a6345cadfbe0e72ecf2b3)) + +* Update element_array_ephys/probe.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`59c72c4`](https://github.com/datajoint/element-array-ephys/commit/59c72c4670abe6f0649eb829af14b289d605b98a)) + +* Update element_array_ephys/probe.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`c983fa8`](https://github.com/datajoint/element-array-ephys/commit/c983fa8967e5c0a20b2a7f7851d5622377ccb16e)) + +* Update element_array_ephys/probe.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`8e53f54`](https://github.com/datajoint/element-array-ephys/commit/8e53f5448b37dee63b678e3f641dcf505149c7ac)) + +* remove redundant type hinting ([`19c447a`](https://github.com/datajoint/element-array-ephys/commit/19c447af3ba77f8330ac7ace59e4e0e9e49fde52)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into ephys_test 
([`52a06e9`](https://github.com/datajoint/element-array-ephys/commit/52a06e93b7591a01d7ead857eb96ae53dbbb8cfc)) + +* Merge pull request #118 from JaerongA/main + +Add pre-commit & update docstrings & various fixes ([`61cb23a`](https://github.com/datajoint/element-array-ephys/commit/61cb23a33be65df8d06d2e344548be8d160f25d2)) + +* pull upstream & resolve merge conflicts ([`26b6be9`](https://github.com/datajoint/element-array-ephys/commit/26b6be9788af03fbba53adb31447623d21ee43da)) + +* Merge branch 'datajoint:main' into main ([`c3ad36f`](https://github.com/datajoint/element-array-ephys/commit/c3ad36f8dfacfb3d58beaaee065c5d75d0a54b28)) + +* Merge pull request #121 from ttngu207/main + +parameterize run_CatGT step from parameters retrieved from `ClusteringParamSet` table ([`24df134`](https://github.com/datajoint/element-array-ephys/commit/24df134629819c17601eec7addf2ae4f359cc567)) + +* Update CHANGELOG.md ([`f5dff5c`](https://github.com/datajoint/element-array-ephys/commit/f5dff5cf04feae0d4c3c142fcc8769bb86d9c0a6)) + +* catGT checks and parameterizable ([`0ade344`](https://github.com/datajoint/element-array-ephys/commit/0ade344dc792ffce2dc25d412ea97a45b135c4d8)) + +* improve validate_file logic ([`63dbd12`](https://github.com/datajoint/element-array-ephys/commit/63dbd12011b1c9978da41664d83f4b36d3a42a19)) + +* update CHANGELOG.md ([`294d4f5`](https://github.com/datajoint/element-array-ephys/commit/294d4f5b8063f32bdc2f82858f10e7b8a0804e0d)) + +* Update element_array_ephys/version.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`b343c15`](https://github.com/datajoint/element-array-ephys/commit/b343c15b20919658b36e3ea0ddb3eef3f82dbf02)) + +* Apply suggestions from code review + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`4651707`](https://github.com/datajoint/element-array-ephys/commit/4651707843f3d78448563bbecc982592b99da035)) + +* Apply suggestions from code review + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> 
([`8920b00`](https://github.com/datajoint/element-array-ephys/commit/8920b00b617b2fc1932e0824740f5bd168715d47)) + +* Update CHANGELOG.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`b29e161`](https://github.com/datajoint/element-array-ephys/commit/b29e1613dd25280ab8e6745d2f9110e96287ec9f)) + +* Update CHANGELOG.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2889293`](https://github.com/datajoint/element-array-ephys/commit/28892939ba2b74aaa748ce4458e31f29e74f9f2a)) + +* Apply suggestions from code review + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`7e213c5`](https://github.com/datajoint/element-array-ephys/commit/7e213c5472b8830a0dedc7f2a727470e02bbfa51)) + +* Update setup.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`5649b61`](https://github.com/datajoint/element-array-ephys/commit/5649b61457ef8c74f1e3f3938870edeceea158c3)) + +* ✏️ fix typos ([`dc85370`](https://github.com/datajoint/element-array-ephys/commit/dc853709848062dc146fa21bf5fb1d11a81a4022)) + +* pull from upstream ([`bda30a3`](https://github.com/datajoint/element-array-ephys/commit/bda30a34214ccf47ac0ecd6fb0ab0bc9ff6101c1)) + +* Merge pull request #119 from CBroz1/qc + +Add QC dashboard ([`08d1291`](https://github.com/datajoint/element-array-ephys/commit/08d12914253cc7b4da1170db9dc233ba49cbc283)) + +* Remove interface 42 dependence ([`4f6c301`](https://github.com/datajoint/element-array-ephys/commit/4f6c30199ee2cec36dd86b5ddeb27062955f1f90)) + +* Cleanup ([`eed1eee`](https://github.com/datajoint/element-array-ephys/commit/eed1eeeb3dd23a33764bdde97b8e0f32ae245a8d)) + +* Cleanup, add comments ([`c31c53d`](https://github.com/datajoint/element-array-ephys/commit/c31c53da0f5cfe4a24221bb5cf73886915ff2ffc)) + +* Version bump + changelog ([`9eb34cd`](https://github.com/datajoint/element-array-ephys/commit/9eb34cdca25f271896c2e2c508db1cfb33d82373)) + +* Remove unused import 
([`9989dc7`](https://github.com/datajoint/element-array-ephys/commit/9989dc741b8808e284a7eab3aa9c292cb1e1966b)) + +* Revert removed docstring item ([`ddeabc7`](https://github.com/datajoint/element-array-ephys/commit/ddeabc7b9d7ed11ade5ed0249ee5c2b4dc318e41)) + +* Run isort on qc.py ([`a65cbe4`](https://github.com/datajoint/element-array-ephys/commit/a65cbe49ed565c041f3f744c2d1b8eff2f6843a7)) + +* WIP: QC dashboard 2 ([`f997fd6`](https://github.com/datajoint/element-array-ephys/commit/f997fd6c82bddd33f6007dc3bea6ced29ed49bb2)) + +* WIP: QC dashboard ([`c873acf`](https://github.com/datajoint/element-array-ephys/commit/c873acfe9fc9a91eca2f4e6f45bd5c49a26380d6)) + +* update docstring ([`8561326`](https://github.com/datajoint/element-array-ephys/commit/8561326c3b2d2c97d5eb0a3f28dcd1d5647d4d2f)) + +* Merge pull request #3 from CBroz1/ja + +linter recommended changes, reduce linter exceptions ([`9a78c15`](https://github.com/datajoint/element-array-ephys/commit/9a78c15a5d553e18c19d1038b552a47ae399330b)) + +* Apply isort ([`904ccb9`](https://github.com/datajoint/element-array-ephys/commit/904ccb9ce2454276e005935bfee2d9b9cf0b181f)) + +* Apply hooks except isort ([`5645ebc`](https://github.com/datajoint/element-array-ephys/commit/5645ebc15cd29c005df36ee17cd316824ff50159)) + +* See details + +- Apply black +- Remove trailing whitespace +- Reduce flake8 exceptions +- Move imports to top +- Remove unused imports (e.g., re, pathlib, log_from_json) +- Add __all__ to init files specifying imports loaded +- Add `noqa: C901` selectively where complexity is high +- l -> line for _read_meta func in spikeglx.py +- give version a default value before loading ([`181677e`](https://github.com/datajoint/element-array-ephys/commit/181677e7622e1102679131974cf0fc567604ede1)) + +* fix docstrings ([`47de1d5`](https://github.com/datajoint/element-array-ephys/commit/47de1d5eac6b24608b20f74c9551b6c8040c9bd3)) + +* update concepts.md 
([`bc95946`](https://github.com/datajoint/element-array-ephys/commit/bc95946656ebd5100b2e3c584b18b857451b3ab8)) + +* update version.py ([`6a997e1`](https://github.com/datajoint/element-array-ephys/commit/6a997e17aa56298b19d71c915a7116320b5d7ad1)) + +* pre-commit ignores docs, .github ([`e52c12e`](https://github.com/datajoint/element-array-ephys/commit/e52c12ed3509ce145853e709334043b8bd1c3272)) + +* update changelog.md ([`8f4ca3f`](https://github.com/datajoint/element-array-ephys/commit/8f4ca3fc216690a526392617cbc65a634da5f63a)) + +* add to ignored flake8 rules ([`16de049`](https://github.com/datajoint/element-array-ephys/commit/16de049e86b417895dd38f686a972b4af6df885e)) + +* import DataJointError ([`434e16a`](https://github.com/datajoint/element-array-ephys/commit/434e16a1013b48f057b0f19b7ee51fd63ac0cc36)) + +* update docstring ([`12098ce`](https://github.com/datajoint/element-array-ephys/commit/12098ceb5bb1b004998ff3e59d89a4e5c3b17c80)) + +* add requirements_dev.txt ([`d19f97f`](https://github.com/datajoint/element-array-ephys/commit/d19f97f22a15d609534796ae04257011fe518d35)) + +* add pre-commit-config ([`278e2f2`](https://github.com/datajoint/element-array-ephys/commit/278e2f2aeb462b3313abcfbaf93de2e206332fcb)) + +* Update element_array_ephys/ephys_report.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`b9fb872`](https://github.com/datajoint/element-array-ephys/commit/b9fb872621449710be8409239c1c0379cc59ed0c)) + +* Update element_array_ephys/ephys_report.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`61c739f`](https://github.com/datajoint/element-array-ephys/commit/61c739f0ca0c6cfb74b204919485cf36dc417a8c)) + +* update docstrings ([`816a0ed`](https://github.com/datajoint/element-array-ephys/commit/816a0ed2322cda25e32b8171499b6de327ca9d98)) + +* figures downloaded to temp folder ([`643637d`](https://github.com/datajoint/element-array-ephys/commit/643637d816bb69f4faf792b0c8b31718ec670cfe)) + +* Merge pull request #117 from 
ttngu207/main + +remove typing - keep consistency and maintain backward compatibility prior to python 3.9 ([`678cd95`](https://github.com/datajoint/element-array-ephys/commit/678cd951928d9e499f81252336dfcc47966e10f5)) + +* remove typing - keep consistency and maintain backward compatibility prior to python 3.9 ([`44a2582`](https://github.com/datajoint/element-array-ephys/commit/44a2582e0dbdb6b820101676de4cad41b6deb9ec)) + +* Merge pull request #114 from ttngu207/main + +Various fixes and improvements - no new feature ([`3635a4a`](https://github.com/datajoint/element-array-ephys/commit/3635a4abc1cc8fd7d82b0ca6c09ecd9e66a4be5d)) + +* add missing CHANGELOG link ([`c2c3482`](https://github.com/datajoint/element-array-ephys/commit/c2c34828c321ca141e1331db5e7f5da408364ba4)) + +* BLACK formatting ([`cbc3b62`](https://github.com/datajoint/element-array-ephys/commit/cbc3b6286fcb9bd0b13bc803e7739c1b0b9dfd34)) + +* BLACK formatting ([`6b375f9`](https://github.com/datajoint/element-array-ephys/commit/6b375f998089114c051a829d9541d8acfa8a5fbe)) + +* Update CHANGELOG.md ([`72e784b`](https://github.com/datajoint/element-array-ephys/commit/72e784b6a2e202da18024f5026df002768901cbf)) + +* BLACK formatting ([`b6ce2f7`](https://github.com/datajoint/element-array-ephys/commit/b6ce2f7dc5d3fb37f3f277399794c095d58ebc0a)) + +* Merge branch 'datajoint:main' into main ([`731c103`](https://github.com/datajoint/element-array-ephys/commit/731c10313c4b01da9bfb440227d48c4118600dd0)) + +* Merge pull request #115 from iamamutt/main + +Remove ibllib dependency ([`561df39`](https://github.com/datajoint/element-array-ephys/commit/561df399a01a113346b8b2c9619fc7f98b953414)) + +* Merge pull request #1 from JaerongA/ephys_test + +fix module name & add docstrings ([`dd6e215`](https://github.com/datajoint/element-array-ephys/commit/dd6e215df2072aece593d2a8d02e67d3fed3fd47)) + +* Update CHANGELOG.md + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> 
([`ed5bbb9`](https://github.com/datajoint/element-array-ephys/commit/ed5bbb9e86d09870a5497a035de7e3eddf84d74c)) + +* update changelog ([`d07a93f`](https://github.com/datajoint/element-array-ephys/commit/d07a93ff5f8ca734847a412cbad4747e4c861383)) + +* bugfix - fix "probe_indices" in single probe recording ([`2676a16`](https://github.com/datajoint/element-array-ephys/commit/2676a161325f18dae13be73849556557a8cea79d)) + +* handles single probe recording in "Neuropix-PXI" format ([`1859085`](https://github.com/datajoint/element-array-ephys/commit/1859085e7133f5971862e0c74eb1176b83e7e426)) + +* safeguard in creating/inserting probe types upon `probe` activation ([`c2d9f47`](https://github.com/datajoint/element-array-ephys/commit/c2d9f47c0871b16ad82570fdd80ef802011f97b8)) + +* bugfix logging median subtraction duration ([`9ec904f`](https://github.com/datajoint/element-array-ephys/commit/9ec904f6697e4cddca068884613e42bbda092098)) + +* bugfix in updating median-subtraction duration ([`8ec0f71`](https://github.com/datajoint/element-array-ephys/commit/8ec0f713e461678c3288bfec083a0f762e885651)) + +* update duration for `median_subtraction` step ([`bd2ff1c`](https://github.com/datajoint/element-array-ephys/commit/bd2ff1cfe25bbc32c7b5ffc63add008d34bdd655)) + +* update docstring ([`68fa77c`](https://github.com/datajoint/element-array-ephys/commit/68fa77c2ed49c9603bccd64fcb75b92ac5b642e8)) + +* Apply suggestions from code review + +Co-authored-by: Thinh Nguyen <thinh@datajoint.com> ([`cd9501c`](https://github.com/datajoint/element-array-ephys/commit/cd9501c1c773df08f4eea740ef312431e9ec5a1c)) + +* fix docstring & formatting ([`9fc7477`](https://github.com/datajoint/element-array-ephys/commit/9fc7477abc7b975c3435d54a83063a248c50d42e)) + +* fix docstring in probe.py ([`7958727`](https://github.com/datajoint/element-array-ephys/commit/7958727e42bd5e235112fd3d14ef3435d8c6dcc5)) + +* feat: 
([`ff3fca0`](https://github.com/datajoint/element-array-ephys/commit/ff3fca0ddc94f3921a06d8593005a2115ccdb930)) + +* remove proj() ([`496c210`](https://github.com/datajoint/element-array-ephys/commit/496c210a14acf6d5de4f3b67049a1af8738579cf)) + +* revert: :adhesive_bandage: revert to um ([`5a7f068`](https://github.com/datajoint/element-array-ephys/commit/5a7f06868a3ddff6616e7010d2cbceae944544aa)) + +* add probe_type in electrode_layouts ([`633f745`](https://github.com/datajoint/element-array-ephys/commit/633f7455c8dba47986b361380af6e73b88595b1b)) + +* spacing defaults to none ([`8f6e280`](https://github.com/datajoint/element-array-ephys/commit/8f6e28083d2e132eadef0e3cdb2b625fb43077bf)) + +* Update element_array_ephys/probe.py + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`704f6ed`](https://github.com/datajoint/element-array-ephys/commit/704f6ed65c818037d33617cb424157f0d305fa5f)) + +* remove mapping dict ([`48ab889`](https://github.com/datajoint/element-array-ephys/commit/48ab889c0f3ac48813065d2243346fdf0e23e41d)) + +* col_count_per_shank ([`be3bd11`](https://github.com/datajoint/element-array-ephys/commit/be3bd11f720822565b59d80278f2eaa8b2cfc6cb)) + +* site_count_per_shank ([`cb21f61`](https://github.com/datajoint/element-array-ephys/commit/cb21f6154d737d2b59b2b5fec2690a8a247bad6f)) + +* modify build_electrodes function ([`1c7160c`](https://github.com/datajoint/element-array-ephys/commit/1c7160c33b1e316e49826a58dcf842790a18c94a)) + +* Merge branch 'main' of https://github.com/iamamutt/element-array-ephys into ephys_test ([`8a65635`](https://github.com/datajoint/element-array-ephys/commit/8a65635dc78952e4227ae8c89a168dcc09a2b192)) + +* Merge remote-tracking branch 'upstream/main' ([`b3a07b8`](https://github.com/datajoint/element-array-ephys/commit/b3a07b82d32f2d89d3a72ff4523e46c0518be20c)) + +* Merge pull request #113 from tdincer/main + +Update README.md 
([`85a1f0a`](https://github.com/datajoint/element-array-ephys/commit/85a1f0a238102122a0a38aed450648ce8477e4b6)) + +* Update README.md ([`7a5f843`](https://github.com/datajoint/element-array-ephys/commit/7a5f843568f0ebcc86681b8d802b494087d6e520)) + +* Merge branch 'datajoint:main' into main ([`8dd5f29`](https://github.com/datajoint/element-array-ephys/commit/8dd5f29d4ce3070e430a31b8ab1f20cb800742b4)) + +* Merge pull request #111 from kabilar/main + +Fix for cicd and other ux fixes ([`2e63edc`](https://github.com/datajoint/element-array-ephys/commit/2e63edc39de704ed77650f21643d50f54c433214)) + +* Move datajoint diagrams ([`8f006c0`](https://github.com/datajoint/element-array-ephys/commit/8f006c040fd82c4250e58bce220f064ff640aca2)) + +* Remove empty spaces to compare with ephys modules ([`863d9b1`](https://github.com/datajoint/element-array-ephys/commit/863d9b1152525dbb129c71dab40e2e22183a06d1)) + +* Fix bug ([`8731def`](https://github.com/datajoint/element-array-ephys/commit/8731def1d3edc1ba1f6d1735984c05d93903b35d)) + +* Fix diagram ([`566bc64`](https://github.com/datajoint/element-array-ephys/commit/566bc64d739fb0ec5f62b01887284aa6988f198b)) + +* Update text ([`48900e8`](https://github.com/datajoint/element-array-ephys/commit/48900e87cae10f1daf2d2f8e3e33520354b65f88)) + +* Merge pull request #110 from CBroz1/docs2 + +Add diagram text layer ([`34912bf`](https://github.com/datajoint/element-array-ephys/commit/34912bf062f738c9535042a8d9a55f5f1d3c74a5)) + +* Add diagram text layer ([`638ebc4`](https://github.com/datajoint/element-array-ephys/commit/638ebc4f23b9f1a7a5edc8c039a0e24dac30cc1d)) + +* Merge pull request #109 from CBroz1/docs2 + +Docs2 ([`23bf669`](https://github.com/datajoint/element-array-ephys/commit/23bf66956ca094e053ff3f8b22b612f0e018d6d7)) + +* Update diagram ([`07f0733`](https://github.com/datajoint/element-array-ephys/commit/07f0733ef00c0bf855c886a9571336768b8f51ff)) + +* Add diagram 
([`38cc7ab`](https://github.com/datajoint/element-array-ephys/commit/38cc7ab4d0678d60cac03729332f6c548fc4c4fd)) + +* Update logo/styling. Hard wrap ([`4d22a16`](https://github.com/datajoint/element-array-ephys/commit/4d22a169094c437b7a80a92c51b92651ec3d5042)) + +* datatype clarification ([`c353400`](https://github.com/datajoint/element-array-ephys/commit/c353400e967e942e81148f60ac77c792ff68eccf)) + +* fix docstring typo ([`50d3dd1`](https://github.com/datajoint/element-array-ephys/commit/50d3dd1fcfe6a4ffd75933e04c5e7cb28564b83e)) + +* Merge pull request #107 from kushalbakshi/main + +Updated CHANGELOG ([`2af7fc5`](https://github.com/datajoint/element-array-ephys/commit/2af7fc55a1f5348694a53c39cc338855f2bf5ae2)) + +## v0.2.0 (2022-11-03) + +### Feature + +* feat: :sparkles: Merge branch 'plotly' into no_curation_plot ([`06c1064`](https://github.com/datajoint/element-array-ephys/commit/06c1064dd890d68afc90a9cc3aca3961edab2691)) + +* feat: :sparkles: add a report schema and plotting png figures ([`66743cc`](https://github.com/datajoint/element-array-ephys/commit/66743cc3dcdc22a35ddd24f7c694278c2903957a)) + +### Fix + +* fix: :bug: use to_plotly_json() instead of to_json() ([`69b2796`](https://github.com/datajoint/element-array-ephys/commit/69b2796285690bcb68d2e3185608a6e33172c0ea)) + +### Unknown + +* Updated CHANGELOG ([`bc5afcc`](https://github.com/datajoint/element-array-ephys/commit/bc5afcc97b23b8032722985158e69b6b01eb34f2)) + +* Merge pull request #102 from kushalbakshi/main + +Added docs + docstrings ([`e04841b`](https://github.com/datajoint/element-array-ephys/commit/e04841b965700551046b29efd98d27577a5c4495)) + +* Update element_array_ephys/ephys_precluster.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`a73fcd2`](https://github.com/datajoint/element-array-ephys/commit/a73fcd2df9ee12860b2d548a45118ac6f5384b51)) + +* Update element_array_ephys/ephys_no_curation.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> 
([`36b8161`](https://github.com/datajoint/element-array-ephys/commit/36b8161359cded22c65ca863ebf22327652aeffd)) + +* Update element_array_ephys/ephys_chronic.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`e6e0b21`](https://github.com/datajoint/element-array-ephys/commit/e6e0b219924416df1012a3bb474f7c31f0207c61)) + +* Update element_array_ephys/ephys_acute.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`f2d550e`](https://github.com/datajoint/element-array-ephys/commit/f2d550e6903bf3e48802cb705bcccd48d1f9f765)) + +* Version and CHANGELOG 0.2.1 -> 0.2.0 ([`2e0cffe`](https://github.com/datajoint/element-array-ephys/commit/2e0cffefc57a2ef2fd7fcb84c1a41796cd31d7bd)) + +* update CHANGELOG ([`473ca98`](https://github.com/datajoint/element-array-ephys/commit/473ca98f2ecfc351fd17ab5d21bf35d25a269bfb)) + +* Updated CHANGELOG and version ([`8b4f4fa`](https://github.com/datajoint/element-array-ephys/commit/8b4f4fac82b6ccb3d95332d62b1a8b317139f8cc)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`dc52a6e`](https://github.com/datajoint/element-array-ephys/commit/dc52a6eba8944aecc239e254d1b421079213bab0)) + +* Merge pull request #106 from ttngu207/main + +add to changelog, bump version ([`89f1d7c`](https://github.com/datajoint/element-array-ephys/commit/89f1d7c7ffdcf49e52548222554a64de96f3e2ea)) + +* Apply suggestions from code review + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> +Co-authored-by: Tolga Dincer <tolgadincer@gmail.com> ([`8f3e76e`](https://github.com/datajoint/element-array-ephys/commit/8f3e76edf254f18be9235d378ad5f839d50a4a51)) + +* add to changelog, bump version ([`02fb576`](https://github.com/datajoint/element-array-ephys/commit/02fb5765d6f71ba558d2e22fc72e970076a875c8)) + +* Merge pull request #103 from JaerongA/no_curation_plot + +add ephys_report schema for data visualizations 
([`3e9b675`](https://github.com/datajoint/element-array-ephys/commit/3e9b675581ed16b2704836a94cf1e55195e35f67)) + +* Update element_array_ephys/plotting/widget.py + +Co-authored-by: Thinh Nguyen <thinh@vathes.com> ([`d0c6797`](https://github.com/datajoint/element-array-ephys/commit/d0c67970e18e435d3c7e298a0d59d6ce42536bee)) + +* f-string formatting ([`14a970f`](https://github.com/datajoint/element-array-ephys/commit/14a970f2e500288f54b33176905dd7037b85b962)) + +* improve clarity and reduce complexity for activating and using the `ephys_report` module ([`5d41039`](https://github.com/datajoint/element-array-ephys/commit/5d41039c7898ce73fa683de6d426636a6099121a)) + +* Merge branch 'no_curation_plot' of https://github.com/JaerongA/element-array-ephys into no_curation_plot ([`d7fb6df`](https://github.com/datajoint/element-array-ephys/commit/d7fb6dff697ed7fa8f89b422f4621dca67fd5886)) + +* Apply suggestions from code review + +Co-authored-by: Thinh Nguyen <thinh@vathes.com> ([`e773e94`](https://github.com/datajoint/element-array-ephys/commit/e773e94f22b9334b3586c8d7f7083afa699009bf)) + +* plot only the shank with the peak electrode ([`ce9abcc`](https://github.com/datajoint/element-array-ephys/commit/ce9abcc16ad02c0b54e122e4ed3c72cb33b1074c)) + +* new method for getting x, y spacing between sites ([`2eb9540`](https://github.com/datajoint/element-array-ephys/commit/2eb9540a436d08d35f1fd9cd7df205ef3a126afe)) + +* fix code to calculate site spacing & figure reformatting ([`b48aaf0`](https://github.com/datajoint/element-array-ephys/commit/b48aaf078390954ad5be39b29fc84fa6e7db340d)) + +* add vscode in .gitignore ([`48e0744`](https://github.com/datajoint/element-array-ephys/commit/48e07444f0239b6479cb27dc40d59300b11e0691)) + +* fixed typo & black formatting ([`f1d6a87`](https://github.com/datajoint/element-array-ephys/commit/f1d6a87df42d89ed7b33acd299cddcd35c4d1f71)) + +* clean up import & remove wrong documentation 
([`dc9b293`](https://github.com/datajoint/element-array-ephys/commit/dc9b2932ea9a0cd9e87c57940d0960a5f84d15bc)) + +* remove zip function ([`357cda9`](https://github.com/datajoint/element-array-ephys/commit/357cda99405b2605dad4d5b76142de8d0c9db6f2)) + +* remove zip function ([`290feca`](https://github.com/datajoint/element-array-ephys/commit/290fecab3299a32eb1650b7f58090027f6fab87e)) + +* add skip_duplicates=True in probe.Probe.insert ([`dab5dfe`](https://github.com/datajoint/element-array-ephys/commit/dab5dfe0e89f9cc668ce024403839c7b21ae0e0f)) + +* Apply suggestions from code review + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`5d50894`](https://github.com/datajoint/element-array-ephys/commit/5d5089499c9fce8a260b3a51381fa3a254e48ec6)) + +* Update element_array_ephys/ephys_chronic.py + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`35d2044`](https://github.com/datajoint/element-array-ephys/commit/35d2044abe97fbcf3351c01b6088007382a74f8d)) + +* pip plotly version ([`0e86529`](https://github.com/datajoint/element-array-ephys/commit/0e8652921ef7f97c64376dcc3367f3ec1e68cd7c)) + +* widget takes the activated ephys schema as an input ([`e302cf8`](https://github.com/datajoint/element-array-ephys/commit/e302cf87cf98520d467808998ae548ae44580676)) + +* add ephys widget ([`fb48db6`](https://github.com/datajoint/element-array-ephys/commit/fb48db69b07c15e05b3ff9a3ebd2c0e7890a8c58)) + +* update unit & probe widget to be on the same widget ([`a41d7ba`](https://github.com/datajoint/element-array-ephys/commit/a41d7ba6c8567727ef5d3ba9edea48cddcfe525a)) + +* add widget event handler ([`d2b07d5`](https://github.com/datajoint/element-array-ephys/commit/d2b07d531ab2ac91f496b604084d4f5fe7279424)) + +* add ipywidget ([`cdaa931`](https://github.com/datajoint/element-array-ephys/commit/cdaa931804356bd71f7e4642e2e380a5efb19b69)) + +* adjust the figure size 
([`00052e5`](https://github.com/datajoint/element-array-ephys/commit/00052e5746351a1194a861c4524d12eb0579e93b)) + +* fix naming convention ([`aab4ead`](https://github.com/datajoint/element-array-ephys/commit/aab4eadb914fa9302615ead69d8ef08545fa8775)) + +* update the probe widget ([`b3c3f5b`](https://github.com/datajoint/element-array-ephys/commit/b3c3f5bf9249105182c9bea10e392cb05ad1011b)) + +* update dependencies ([`75e15d2`](https://github.com/datajoint/element-array-ephys/commit/75e15d2f3154eeca5e69c95bb5a3961a1c657a5a)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into no_curation_plot ([`5027d17`](https://github.com/datajoint/element-array-ephys/commit/5027d17537c131d6e4959d5fad2c7e1466ca5db3)) + +* resolve circular dependency & reformatting ([`2b69ab5`](https://github.com/datajoint/element-array-ephys/commit/2b69ab53e5415813414c967342f50ed1afb00b30)) + +* add shank in ProbeLevelReport & formatting ([`36ce21f`](https://github.com/datajoint/element-array-ephys/commit/36ce21f3809b2a890d0603a8a054dad77ddf6e1c)) + +* Merge pull request #1 from ttngu207/ephys_report + +resolve "activation" and circular dependency ([`f191214`](https://github.com/datajoint/element-array-ephys/commit/f19121470d02ebe9f0ca77f4e09f9e3bb71ddd8e)) + +* Update element_array_ephys/plotting/unit_level.py ([`36f128a`](https://github.com/datajoint/element-array-ephys/commit/36f128a411853e6e96614aac372b5c392ce307e0)) + +* resolve "activation" and circular dependency ([`4693728`](https://github.com/datajoint/element-array-ephys/commit/46937287c9c4b05efd2f6d121b6ea0142a2aa845)) + +* change report to ephys_report ([`f7ea1b8`](https://github.com/datajoint/element-array-ephys/commit/f7ea1b81281be6c39a39e1e8900c291b439de920)) + +* convert the unit report figures to plotly ([`275b479`](https://github.com/datajoint/element-array-ephys/commit/275b479da06a7ac7b5a188f6d1e57cf39a2bee69)) + +* Merge branch 'main' into no-curation 
([`6af6206`](https://github.com/datajoint/element-array-ephys/commit/6af6206e7a14adddbde98288a4e6c460a19516bb)) + +* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`ab7b78c`](https://github.com/datajoint/element-array-ephys/commit/ab7b78c1dea17e173d0365c647066d1a02c2e22f)) + +* Update element_array_ephys/ephys_precluster.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`f6b93d7`](https://github.com/datajoint/element-array-ephys/commit/f6b93d75768053636cbc495a1600a19b9535f0b7)) + +* Update element_array_ephys/ephys_no_curation.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`486d938`](https://github.com/datajoint/element-array-ephys/commit/486d938a5b06913927d5eef1c593b0ee8365153f)) + +* Update element_array_ephys/ephys_chronic.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`ab1a067`](https://github.com/datajoint/element-array-ephys/commit/ab1a0675b52d29abd9900d4286041d49e35e953a)) + +* Roadmap updated in concepts.md ([`aec85cc`](https://github.com/datajoint/element-array-ephys/commit/aec85cc35f3a17b217a13012041fcb5c4492e75d)) + +* `enum` attribute description updated ([`4f28ad1`](https://github.com/datajoint/element-array-ephys/commit/4f28ad15a10449d19bcafb98e5b592521ac8769d)) + +* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`c5d4882`](https://github.com/datajoint/element-array-ephys/commit/c5d488239edd0fc3490c41c86108009662aff629)) + +* Update docs/mkdocs.yaml + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`2355a46`](https://github.com/datajoint/element-array-ephys/commit/2355a465c6ee5611c0a8b04339af45d782b48496)) + +* Sentence case in concepts.md ([`72024ea`](https://github.com/datajoint/element-array-ephys/commit/72024ea3ea4de13738244736b710d08cdc00014b)) + +* References added to concepts + version change ([`2528e6d`](https://github.com/datajoint/element-array-ephys/commit/2528e6dea5788f5929b31eebc86930da6c03d44d)) + +* Minor formatting 
update to docstrings ([`5083b78`](https://github.com/datajoint/element-array-ephys/commit/5083b78b2ad78c852d5bea4747a95a16f5b663e8)) + +* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`b030951`](https://github.com/datajoint/element-array-ephys/commit/b03095182e50e8700a7f78f815db9b7df401ad2e)) + +* Update element_array_ephys/ephys_precluster.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`38f2410`](https://github.com/datajoint/element-array-ephys/commit/38f2410bed0f98de478d7f1795a08e065bedd4eb)) + +* Update element_array_ephys/ephys_precluster.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`ca1441f`](https://github.com/datajoint/element-array-ephys/commit/ca1441f24446c89e63d708deef9318de6f83a543)) + +* Update element_array_ephys/probe.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`98f8a8c`](https://github.com/datajoint/element-array-ephys/commit/98f8a8c2e62a06baaa80e61d634cff7bf25acc0b)) + +* Updated docstrings after code review ([`17d9e4a`](https://github.com/datajoint/element-array-ephys/commit/17d9e4aed104ee047c77557591c187259ef6575a)) + +* Changes applied from code review ([`c75e2d7`](https://github.com/datajoint/element-array-ephys/commit/c75e2d71e485336050ad94a1016bd9af1cc0e432)) + +* Update docs/mkdocs.yaml + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`37d6a60`](https://github.com/datajoint/element-array-ephys/commit/37d6a6098632044474a9c2caae97ece16c4b2c68)) + +* Automated versioning added ([`7431058`](https://github.com/datajoint/element-array-ephys/commit/74310589f076f2f44283c0d3ff3cd46ac0de54e0)) + +* science-team to concepts + hard wrap test ([`b0e755f`](https://github.com/datajoint/element-array-ephys/commit/b0e755f6cc14313eb83e2a2e09524f751d0c5ff0)) + +* Docstrings added ([`0feee75`](https://github.com/datajoint/element-array-ephys/commit/0feee75270d8a4a6e99507937f2aa530bd4b7dcd)) + +* Updated docs based on DLC merge 
([`01322db`](https://github.com/datajoint/element-array-ephys/commit/01322db39dc06a540799142788c3173287323347)) + +* Updates mirroring DLC ([`0111dcd`](https://github.com/datajoint/element-array-ephys/commit/0111dcdf3ab3e7e1d3b95a2e4a8fcc7ac3ca3cef)) + +* Fixes to docs after local testing + docstrings ([`7135ce1`](https://github.com/datajoint/element-array-ephys/commit/7135ce19de7080b5c4fe11f1785f3ce46433c924)) + +* Updated docstrings + Dockerfiles ([`e7423e2`](https://github.com/datajoint/element-array-ephys/commit/e7423e2d330c17a2e68f7aee4f06237495d30fd3)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`afb64eb`](https://github.com/datajoint/element-array-ephys/commit/afb64ebc93d33a3aa06407e7b96f98925f4c5fad)) + +* Merge pull request #94 from datajoint/run_kilosort + +`run_kilosort` -> `main` ([`db75e4d`](https://github.com/datajoint/element-array-ephys/commit/db75e4dbd3770ce54b43d16ed652e338d440eab2)) + +* Merge pull request #97 from ttngu207/no-curation + +pull from main - add QC ([`03e2d5f`](https://github.com/datajoint/element-array-ephys/commit/03e2d5fcfbfa2daa15d2962676903ede80daa497)) + +* specify lfp filepath as input ([`10d12a6`](https://github.com/datajoint/element-array-ephys/commit/10d12a696057ef44da373cbad0029350fc60761e)) + +* smart handling of finished median subtraction step ([`34e59cc`](https://github.com/datajoint/element-array-ephys/commit/34e59cc20ca31472ddba07e190c940ca432b6b99)) + +* modify `extracted_data_directory` path - same as ks output path ([`296f7c6`](https://github.com/datajoint/element-array-ephys/commit/296f7c672296a2502eb84bbc03e7138e42da00bf)) + +* bugfix QC ingestion ([`2d76efc`](https://github.com/datajoint/element-array-ephys/commit/2d76efcccb4f47569a4de9f4c4a2ca215b45146c)) + +* bugfix - remove `%` in attributes' comments ([`d008b05`](https://github.com/datajoint/element-array-ephys/commit/d008b051f0d4752b9582d642e5f948d84386b902)) + +* add QC to `ephys_no_curation` 
([`db448f7`](https://github.com/datajoint/element-array-ephys/commit/db448f72a1f7bc091edfd3c29dabadeea71c6d24)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into no-curation ([`ef486ed`](https://github.com/datajoint/element-array-ephys/commit/ef486ed2e686ed1c59f4364d8570dc52c96c2347)) + +* median subtraction on a copied data file ([`32bff24`](https://github.com/datajoint/element-array-ephys/commit/32bff24a9f325673fc8606fee2d916a894000e80)) + +* Merge pull request #93 from CBroz1/rk + +Ensure Path type for get_spikeglx_meta_filepath ([`a738ee7`](https://github.com/datajoint/element-array-ephys/commit/a738ee74a4bd593a43332676ed5bcfa005434319)) + +* Ensure Path type for get_spikeglx_meta_filepath ([`0e94252`](https://github.com/datajoint/element-array-ephys/commit/0e942523d300ac01d56ae8419cb62e696d3abae2)) + +* Merge pull request #91 from ttngu207/no-curation + +bugfix for catgt ([`6757ef7`](https://github.com/datajoint/element-array-ephys/commit/6757ef738b95ad8c7cc7e305a7a9a0f96de42fe9)) + +* Update kilosort_triggering.py ([`71d87ae`](https://github.com/datajoint/element-array-ephys/commit/71d87aecfe37d6a435f5cc819ee389ed621d9772)) + +* Merge pull request #90 from ttngu207/no-curation + +enable `CatGT` ([`23ca7ca`](https://github.com/datajoint/element-array-ephys/commit/23ca7ca6d2474daab71d8a00c4c492d4395a2667)) + +* improve error handling ([`f60ba3d`](https://github.com/datajoint/element-array-ephys/commit/f60ba3d6a0c0776d1f7b97d02b9d1608a5788a0e)) + +* bugfix - path search for catgt output ([`c33d1b0`](https://github.com/datajoint/element-array-ephys/commit/c33d1b02811dbaf7e492da60e70819d5102798ff)) + +* `missing_ok` arg only available in python 3.8+ ([`35da39b`](https://github.com/datajoint/element-array-ephys/commit/35da39bb1c5c80c18ade40fed296cfb47b158123)) + +* bugfix ([`fbdbe24`](https://github.com/datajoint/element-array-ephys/commit/fbdbe24adc55c6fb51f4e524760528f27589f37b)) + +* bugfix 
([`aed42ca`](https://github.com/datajoint/element-array-ephys/commit/aed42ca1dddd15a6b2a03c329f9278122846c55b)) + +* enable catgt run ([`987231b`](https://github.com/datajoint/element-array-ephys/commit/987231be7d386351a043901d19f55f76b6bbf90d)) + +* bugfix in running catgt ([`5905392`](https://github.com/datajoint/element-array-ephys/commit/59053923a9968df6f2ab7f90d9324c7502dcaf0e)) + +* Update kilosort_triggering.py ([`f9f18d0`](https://github.com/datajoint/element-array-ephys/commit/f9f18d0864615f24420b3feb982b68dfd85dd6d7)) + +* Merge pull request #89 from ttngu207/no-curation + +implement data compression using `mtscomp` for openephys and spikeglx for neuropixels data ([`c153e7f`](https://github.com/datajoint/element-array-ephys/commit/c153e7f4f0875a08f097e6af653bb6e8248c5c08)) + +* garbage collect openephys objects ([`d8aea04`](https://github.com/datajoint/element-array-ephys/commit/d8aea041f35ba2b92e8f619f4b3729123852116a)) + +* garbage collect openephys objects ([`97f3d21`](https://github.com/datajoint/element-array-ephys/commit/97f3d21ce0ae6e5827070d093893ab836665998a)) + +* implement data compression using `mtscomp` for openephys and spikeglx neuropixels data ([`b2bd0ee`](https://github.com/datajoint/element-array-ephys/commit/b2bd0eeab31a63d95fcaf84aaafb436289da8838)) + +* Merge pull request #88 from ttngu207/no-curation + +overall code cleanup/improvement for more robust and optimal kilosort run ([`ad8436e`](https://github.com/datajoint/element-array-ephys/commit/ad8436e8535ab34fdb24efea7e0aa9bc5d2d6178)) + +* Merge branch 'no-curation' of https://github.com/ttngu207/element-array-ephys into no-curation ([`fd331bd`](https://github.com/datajoint/element-array-ephys/commit/fd331bdefc036eb9e08fad83b8ffba41dc037ec7)) + +* remove space escaping character ([`b71b459`](https://github.com/datajoint/element-array-ephys/commit/b71b459744b212251d0685b7bebb82d859fc8723)) + +* improve kilosort calls, handle spaces in paths 
([`0c77826`](https://github.com/datajoint/element-array-ephys/commit/0c77826af1141d0e2d5828736252b33e56734af5)) + +* improve error message ([`a3c5c2f`](https://github.com/datajoint/element-array-ephys/commit/a3c5c2fb9c03e3b6df293ed0e8fb58f17a20ef78)) + +* bugfix, match new implementation for openephys ([`3f1ee37`](https://github.com/datajoint/element-array-ephys/commit/3f1ee371bec5c68a2c9838082df87b6368074ebd)) + +* code cleanup, minor bugfix ([`b97566e`](https://github.com/datajoint/element-array-ephys/commit/b97566e6b833d610377e93cd21a08a0272f3a075)) + +* improve logic for running kilosort modules in a resumable fashion ([`9a59e57`](https://github.com/datajoint/element-array-ephys/commit/9a59e574dd25176a8da9b8142d6e87aeed3c5f74)) + +* Merge pull request #86 from CBroz1/rk + +Changes for codebook deployment ([`d9c3887`](https://github.com/datajoint/element-array-ephys/commit/d9c38873e3371dd045bb90e199a0a58caa5e701b)) + +* WIP: version bump pynwb to 2.0 ([`0221848`](https://github.com/datajoint/element-array-ephys/commit/0221848b6466daba6553e1e1a5967a2dee08c954)) + +* Merge branch 'run_kilosort' of https://github.com/datajoint/element-array-ephys into rk ([`13d74ad`](https://github.com/datajoint/element-array-ephys/commit/13d74ad73efac3cd5f94f1ffe374c762a95c936d)) + +* Merge pull request #77 from ttngu207/no-curation + +more robust loading of openephys format ([`d298b07`](https://github.com/datajoint/element-array-ephys/commit/d298b07c0ba7805a5efeee2d9db703cc35913925)) + +* more robust loading of openephys format ([`67039ac`](https://github.com/datajoint/element-array-ephys/commit/67039ac51bd87754a610053bf8e85d0a958f42be)) + +* Merge pull request #73 from ttngu207/no-curation + +update openephys loader - handling open ephys v0.6.0 ([`9272ee6`](https://github.com/datajoint/element-array-ephys/commit/9272ee64417248b7afe85786ae60366710c0f0ff)) + +* added loading of electrode location for new openephys format 
([`4e367d7`](https://github.com/datajoint/element-array-ephys/commit/4e367d72dc9928b69553613844773ad38c98ea91)) + +* update open ephys loader to handle "STREAM" in latest format ([`07604e2`](https://github.com/datajoint/element-array-ephys/commit/07604e24c185421566122024b3d8b8c3f60b4475)) + +* Merge pull request #70 from ttngu207/no-curation + +bugfix for LFP electrode mapping ([`747c15f`](https://github.com/datajoint/element-array-ephys/commit/747c15f5b481790e6e36131dfee8ab932eeb6220)) + +* bugfix for LFP electrode mapping ([`f11e016`](https://github.com/datajoint/element-array-ephys/commit/f11e0161e949bc572f0ac7ee2e0b46096fa00351)) + +* `kilosort2` also as part of the `contents` for ClusteringMethod ([`f4b917d`](https://github.com/datajoint/element-array-ephys/commit/f4b917d0eada17b3c54c4922ef2295f368569855)) + +* Update requirements.txt ([`a578d85`](https://github.com/datajoint/element-array-ephys/commit/a578d851db1cc439b6c1bc380bdb6ee6d6af4789)) + +* Add contact info to Code of Conduct ([`70e0b1c`](https://github.com/datajoint/element-array-ephys/commit/70e0b1c3f18d7e150957e5ad943a9f3c76d130e3)) + +* Add Code of Conduct ([`e43e5d5`](https://github.com/datajoint/element-array-ephys/commit/e43e5d5b88ca82c1be14b7392c56397a9162b7c6)) + +* Issue #63 ([`d102f6f`](https://github.com/datajoint/element-array-ephys/commit/d102f6fa5bb79c8ddf23e3c6df4e200cb3c02a25)) + +* Merge branch 'rk' of https://github.com/CBroz1/element-array-ephys into rk ([`bd6d7e4`](https://github.com/datajoint/element-array-ephys/commit/bd6d7e471ea322e4675dbf23bd253e53ac057ca1)) + +* Update README.md + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`720e355`](https://github.com/datajoint/element-array-ephys/commit/720e355c9201f79d1638a15099e325ef0dda76cd)) + +* Issue #11 ([`4a3e0bf`](https://github.com/datajoint/element-array-ephys/commit/4a3e0bf0332abcab167e7fc65c99de3f98f39e2f)) + +* WIP: nwb line length, Readme mention of 
([`e1c9b35`](https://github.com/datajoint/element-array-ephys/commit/e1c9b355d1270acd6ccd144106b173e68c0e6654)) + +* WIP: NWB fix - prevent SQL err by restricting key ([`b62fd12`](https://github.com/datajoint/element-array-ephys/commit/b62fd12f5d94ccd70aa5d816ac5c2320b7d8520d)) + +* WIP: nwb bugfix ([`49bba8a`](https://github.com/datajoint/element-array-ephys/commit/49bba8a2f03fd1e0a74910bf360dc1b68308f261)) + +* Merge pull request #69 from ttngu207/no-curation + +Add no-curation version and run kilosort analysis ([`364f80e`](https://github.com/datajoint/element-array-ephys/commit/364f80ed261e6297c20b14a3915ba29b6beb7cb4)) + +* Merge remote-tracking branch 'upstream/run_kilosort' into no-curation ([`ddd4095`](https://github.com/datajoint/element-array-ephys/commit/ddd409543cc531ef11c1961fb50ffc1a5a516772)) + +* Merge branch 'no-curation' of https://github.com/ttngu207/element-array-ephys into no-curation ([`7fecff1`](https://github.com/datajoint/element-array-ephys/commit/7fecff10fd4b530af00e1e408b723fab93787fd4)) + +* Apply suggestions from code review + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`af864d7`](https://github.com/datajoint/element-array-ephys/commit/af864d73d5925458d0955d6c0556366202ad88ee)) + +* added assertion - safeguard against failed loading of continuous.dat ([`47babf3`](https://github.com/datajoint/element-array-ephys/commit/47babf37c9556d1d496cd4d7687baea7d57cd7eb)) + +* handles new probe naming in latest Open Ephys format ([`cd5fe70`](https://github.com/datajoint/element-array-ephys/commit/cd5fe70261b48142f4406087eacdde96c141de99)) + +* update openephys loader - handle new open ephys format ([`11a12ba`](https://github.com/datajoint/element-array-ephys/commit/11a12ba00a169618d510f967132ab738f24151e4)) + +* Update openephys.py ([`85c7c8b`](https://github.com/datajoint/element-array-ephys/commit/85c7c8ba12154fff8695b1a66d9db96440e8bb08)) + +* configurable `paramset_idx` for auto ClusteringTask generation 
([`39c8579`](https://github.com/datajoint/element-array-ephys/commit/39c8579821aec505fffaf40c8e41d833ec9f775f)) + +* bugfix ([`769de13`](https://github.com/datajoint/element-array-ephys/commit/769de136cfd91e3ae57228d657af405a302ebeaf)) + +* Update __init__.py ([`e0a9a4f`](https://github.com/datajoint/element-array-ephys/commit/e0a9a4f38fe74e790e1d1c70cf3b7ab1d68f8f8a)) + +* delete nwb export - rename `ephys` module -> `ephys_acute` ([`c2f8aea`](https://github.com/datajoint/element-array-ephys/commit/c2f8aeaa7aad48c602484b9358a569d2b54d69c4)) + +* remove unintended prototyping work ([`83649f5`](https://github.com/datajoint/element-array-ephys/commit/83649f5edf2ddb4788e5b361ec7153238ecbfee4)) + +* handle older open ephys format for single probe ([`bdcfa46`](https://github.com/datajoint/element-array-ephys/commit/bdcfa46ea7d1d55af48b22556b9943c9d44b5fff)) + +* Update requirements.txt ([`f0b3d4a`](https://github.com/datajoint/element-array-ephys/commit/f0b3d4a7b6c77c6bb295ebb558458caacaa1543a)) + +* Update requirements.txt ([`7320f9f`](https://github.com/datajoint/element-array-ephys/commit/7320f9f6b966548145b4a39b5bbf87b0d5c8d6e3)) + +* Update requirements.txt ([`cb1a041`](https://github.com/datajoint/element-array-ephys/commit/cb1a0419d8707808b1f1e0599358be6c42a00bd2)) + +* rename `sess_dir` -> `session_dir` ([`03cab02`](https://github.com/datajoint/element-array-ephys/commit/03cab02ee709e94b151621d00b12e455953dccfb)) + +* Apply suggestions from code review + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`f4052cc`](https://github.com/datajoint/element-array-ephys/commit/f4052cc5d079e6ea065ce6f6aab34b462250ba39)) + +* name tweak ([`779b2fb`](https://github.com/datajoint/element-array-ephys/commit/779b2fb95b277768cb15046a4eea4e7138e23749)) + +* minor bugfix ([`d66368c`](https://github.com/datajoint/element-array-ephys/commit/d66368c7f016e244b4a81c1aa2f6b7d7bbc4d15d)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into 
no-curation ([`b4832ea`](https://github.com/datajoint/element-array-ephys/commit/b4832ea82416a7d76675811b2936c062185514f8)) + +* DEPRECATING NWB EXPORT ([`4951b39`](https://github.com/datajoint/element-array-ephys/commit/4951b396a1b97a9dda3aacdc974c23f64d8bfd9f)) + +* Merge pull request #4 from A-Baji/no-curation + +all three ephys files up to date ([`9dd6b42`](https://github.com/datajoint/element-array-ephys/commit/9dd6b42fc77e18089a419e10457013c86380db0c)) + +* applied requested changes ([`c56cd18`](https://github.com/datajoint/element-array-ephys/commit/c56cd188357f84da89c72c56d74b6bdf6913f11e)) + +* all three ephys files up to date ([`f2881ca`](https://github.com/datajoint/element-array-ephys/commit/f2881ca0d2c61dfcdeff9794f6fe26b1ecb6a066)) + +* Merge pull request #3 from ttngu207/nwb-export + +bugfix in assigning unit electrode indices ([`f9a4754`](https://github.com/datajoint/element-array-ephys/commit/f9a4754355fe556c6a7ae28f068556568a9966a1)) + +* bugfix in assigning unit electrode indices ([`31bba8c`](https://github.com/datajoint/element-array-ephys/commit/31bba8ca4061f6225ea111649929a0e2eb942615)) + +* include probe as part of the electrode_group name for uniqueness ([`aa47c8a`](https://github.com/datajoint/element-array-ephys/commit/aa47c8a761e1149fd386a1e2ca910670f24e18b7)) + +* version bump ([`8657d58`](https://github.com/datajoint/element-array-ephys/commit/8657d58557cc755292d1be4638b4f8bf3c4af1ed)) + +* fix NWB export - null `waveforms` - back to version 0.1.0b1 ([`dae36d1`](https://github.com/datajoint/element-array-ephys/commit/dae36d1f4793047b1a1ac684fc0f37f529d1c9aa)) + +* version bump ([`6107e8e`](https://github.com/datajoint/element-array-ephys/commit/6107e8e22f29b858a2ef139ea5890b7cfa86b80b)) + +* NWB export fix, specifying which ephys module ([`8dff08b`](https://github.com/datajoint/element-array-ephys/commit/8dff08b5c094c0867cea242738f8a9e1ffbca6ba)) + +* handles multi-probe for older OE version 
([`02c4b67`](https://github.com/datajoint/element-array-ephys/commit/02c4b671c2f58aaf19a966af266d2e56d25f8a86)) + +* openephys loader - handles signalchain and processor as single element or list ([`2022e91`](https://github.com/datajoint/element-array-ephys/commit/2022e91079ecb021b5a3b0cc0771631206692c9c)) + +* for pykilosort's probe, provide both Nchan and NchanTOT ([`1c39568`](https://github.com/datajoint/element-array-ephys/commit/1c39568045b65ac4de3597e3d539311efdb033c1)) + +* handle missing `sample_rate` from pykilosort params.py ([`142459d`](https://github.com/datajoint/element-array-ephys/commit/142459d6f21d096b1490e301cfbbeb22ac370e6c)) + +* bugfix in triggering pykilosort ([`02069c9`](https://github.com/datajoint/element-array-ephys/commit/02069c94b8b088a2f16b58f5c7224f17a3920cd5)) + +* clusters extraction - check `cluster_group.tsv` and `cluster_KSLabel.tsv` ([`da10c66`](https://github.com/datajoint/element-array-ephys/commit/da10c66caf6a99f0a0f63f89c94136f5470983c7)) + +* handles extraction of `connected` channels for NP_PROBE format in OpenEphys ([`43d6614`](https://github.com/datajoint/element-array-ephys/commit/43d6614f8b4a36aebcb81ed49500d890f35de1f7)) + +* bugfix, timedelta as seconds ([`adffe34`](https://github.com/datajoint/element-array-ephys/commit/adffe34ea52ed2f0550a2063b843b72d67d90ef2)) + +* bugfix - extract recording datetime (instead of using experiment datetime) ([`c213325`](https://github.com/datajoint/element-array-ephys/commit/c21332543e23f647e0198e9eba0881f256e85a87)) + +* bugfix openephys loader ([`0d16e7e`](https://github.com/datajoint/element-array-ephys/commit/0d16e7ed61ec7911d97db7077b6f11498709cf73)) + +* search recording channels for Open Ephys based on channel names ([`d105419`](https://github.com/datajoint/element-array-ephys/commit/d1054195a63608f041a62dd49d9134fae80dc89b)) + +* bugfix in electrode sites design for Neuropixels UHD probe 
([`f55a6a7`](https://github.com/datajoint/element-array-ephys/commit/f55a6a7dfd373efe1405aeea050a0f8fe0b9e6f8)) + +* supporting `neuropixels UHD` in `ephys.EphysRecording` ([`db3027b`](https://github.com/datajoint/element-array-ephys/commit/db3027b7b48eca7e281cfa93c23b546545457ed4)) + +* handles format differences between npx1 vs 3A ([`e325a30`](https://github.com/datajoint/element-array-ephys/commit/e325a30d1ab4879077d59d773b50cb05998168bd)) + +* fix package requirement formatting error ([`af2b18b`](https://github.com/datajoint/element-array-ephys/commit/af2b18ba88287f234ab0ade5c76210a57eed718b)) + +* update openephys loader ([`4250220`](https://github.com/datajoint/element-array-ephys/commit/4250220c7933f47135208c31a5d0e6c46a2d8518)) + +* minor bugfix in running pykilosort ([`b6f8f99`](https://github.com/datajoint/element-array-ephys/commit/b6f8f99dbc8b2bc56ca2f6484d1f8f09f8056944)) + +* using fork of pyopenephys ([`96931a4`](https://github.com/datajoint/element-array-ephys/commit/96931a4fdfcaebcee2853dac3835e7fdf954524f)) + +* use_C_waves=False for OpenEphys ([`81d99c8`](https://github.com/datajoint/element-array-ephys/commit/81d99c8c4c6901daa23d01e19e760d3b2d737a6f)) + +* first prototype for pykilosort ([`819ff19`](https://github.com/datajoint/element-array-ephys/commit/819ff193f326e950e6f06dc4ed4785a4ba96be0b)) + +* triggering kilosort analysis for open-ephys ([`df599fb`](https://github.com/datajoint/element-array-ephys/commit/df599fbe88e0ffc4694c33479c1247c22f66760e)) + +* add `neuropixels UHD` probe type ([`ddc3b94`](https://github.com/datajoint/element-array-ephys/commit/ddc3b9429b53ea6d7e5889171884522e7a05dbad)) + +* specify additional recording-info as part of the `params` ([`58b5984`](https://github.com/datajoint/element-array-ephys/commit/58b598473ad6c83c8966176d484a0f23c8056a6b)) + +* bugfix for running kilosort for Open Ephys data ([`199a2ba`](https://github.com/datajoint/element-array-ephys/commit/199a2baf43eadc1028961991173d5b010d31bc39)) + +* first 
prototype for running the ecephys_pipeline with OpenEphys ([`49ca0be`](https://github.com/datajoint/element-array-ephys/commit/49ca0beded17dd3d613b498223cbced3ce5480f1)) + +* add nwb export to `no-curation` ephys ([`b25f065`](https://github.com/datajoint/element-array-ephys/commit/b25f065f64735727ddae3e5e6ef907ba7368bfb9)) + +* Merge pull request #2 from ttngu207/nwb-export + +Nwb export ([`3ebdf23`](https://github.com/datajoint/element-array-ephys/commit/3ebdf236cb6f20cf0458f21b4358ca5b8b13c958)) + +* Update nwb.py ([`19616ef`](https://github.com/datajoint/element-array-ephys/commit/19616ef695d2546bfd441b32ea0df4a668488392)) + +* handle NWB export with multiple curated clusterings from one session ([`d07f830`](https://github.com/datajoint/element-array-ephys/commit/d07f830dc9384193164919399fb57605b3ea96c7)) + +* added NWB export ([`f740aef`](https://github.com/datajoint/element-array-ephys/commit/f740aef79c0b87a0e3b951c58e36daf701705195)) + +* minor bugfix ([`09c1e60`](https://github.com/datajoint/element-array-ephys/commit/09c1e6072dc681898e5edf9e8e866e9519ac76bd)) + +* stylistic improvements, addressing code review comments ([`e8ffe17`](https://github.com/datajoint/element-array-ephys/commit/e8ffe17711ad66bbf5011aef9bcff3f7ed2afe76)) + +* Apply suggestions from code review + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`74e3ead`](https://github.com/datajoint/element-array-ephys/commit/74e3eadc0c722bef43901f075434142314604077)) + +* check `ap.bin` file validity before triggering kilosort (based on filesize) ([`beaf765`](https://github.com/datajoint/element-array-ephys/commit/beaf7651cffa67e1bc8a10b19cad49dde8e6530e)) + +* duration for each module run ([`19b704b`](https://github.com/datajoint/element-array-ephys/commit/19b704b70af25ff8e3b40d28ac1858748007c9bb)) + +* bugfix logging for kilosort triggering ([`f34e95d`](https://github.com/datajoint/element-array-ephys/commit/f34e95dcda3ba8379cfe9e860277d69f1336db37)) + +* minor bugfix 
([`55bec01`](https://github.com/datajoint/element-array-ephys/commit/55bec0122f077bbb4b8ac90516da6d0a78dc8630)) + +* stage tracking and resumable kilosort run ([`408532c`](https://github.com/datajoint/element-array-ephys/commit/408532cf9b685b9a57f59831ba3fd16d0982ea97)) + +* minor cleanup ([`dc7ddd9`](https://github.com/datajoint/element-array-ephys/commit/dc7ddd912de849b8b63335bf33e700a630d117db)) + +* improve clusteringtask and waveform ingestion routine ([`c2ee64f`](https://github.com/datajoint/element-array-ephys/commit/c2ee64f52e6ce1be7062584dbd90129bae6cb891)) + +* new version 0.1.0b1 ([`67341d8`](https://github.com/datajoint/element-array-ephys/commit/67341d8f5470622e6c4e58a0b7f3ae3989c281a7)) + +* Update kilosort.py ([`0f0c212`](https://github.com/datajoint/element-array-ephys/commit/0f0c21249de2c81cef480df219f3c005a62f3b78)) + +* bugfix - no dir created ([`044c389`](https://github.com/datajoint/element-array-ephys/commit/044c389fae07540621a36af6568a35199cb3006a)) + +* add debugging lines ([`b9f4e92`](https://github.com/datajoint/element-array-ephys/commit/b9f4e9208b80f113b70f66d46fd6d4424dde4ec0)) + +* log the folder creation ([`ae966aa`](https://github.com/datajoint/element-array-ephys/commit/ae966aa9f55d7b835995c9d5a2b1e39f03e3f4ea)) + +* bugfix, convert path to string ([`94aade7`](https://github.com/datajoint/element-array-ephys/commit/94aade7212775ccc275979e05d17cb195bb665de)) + +* bugfix ([`28c4452`](https://github.com/datajoint/element-array-ephys/commit/28c445279f310822260c5df78f906c7bf77a3764)) + +* updating `kilosort_repository` depending on which KSVer to be used ([`38c5be6`](https://github.com/datajoint/element-array-ephys/commit/38c5be6fd8d1d1225c61c28eecabaf311e605694)) + +* include `clustering_method` into the calculation of `param_set_hash` ([`acdab12`](https://github.com/datajoint/element-array-ephys/commit/acdab125acfc62792fa4fa18ad0ba403d16a5da8)) + +* make variable naming consistent 
([`a0ea9f7`](https://github.com/datajoint/element-array-ephys/commit/a0ea9f70c8dec59c9d415bd9de2e219ea69d0e81)) + +* add kilosort 2.5 as default content ([`a6cae12`](https://github.com/datajoint/element-array-ephys/commit/a6cae1291534e929c451d422da7083241f3418f9)) + +* minor bugfix ([`69c5e51`](https://github.com/datajoint/element-array-ephys/commit/69c5e5144c4210bd2248c354b56c6ba1bc4f6a47)) + +* change default `noise_template_use_rf` to False ([`c593baf`](https://github.com/datajoint/element-array-ephys/commit/c593bafbac84334c1e388fe96817494085878aed)) + +* missing generate module json ([`375e437`](https://github.com/datajoint/element-array-ephys/commit/375e437861d33791147d2913a8ea94d8031c12d6)) + +* bugfix ([`d63561f`](https://github.com/datajoint/element-array-ephys/commit/d63561f74ff7dbf1bc87922f74b5f55ad0bd5cd6)) + +* handle cases where `fileTimeSecs` is not available ([`6788180`](https://github.com/datajoint/element-array-ephys/commit/6788180682f8d2ff4ee3bdc0a6a01dd61814c67f)) + +* bugfix in triggering ecephys_spike_sorting ([`6bf0eb1`](https://github.com/datajoint/element-array-ephys/commit/6bf0eb100e0e5480e8824644ff1c3b638e889c24)) + +* minor tweak/improvements in kilosort triggering ([`f699ce7`](https://github.com/datajoint/element-array-ephys/commit/f699ce7e3af7c387579f133c763046f6e55517f4)) + +* Update kilosort_trigger.py ([`dd01fd2`](https://github.com/datajoint/element-array-ephys/commit/dd01fd2158d3f09f314e705b2c08f5e1b4205085)) + +* flag to create spike sorting output dir ([`6c646bb`](https://github.com/datajoint/element-array-ephys/commit/6c646bbcc2437f00c8de4f99aa1d6738c5acd09f)) + +* fix missing `clustering_method` ([`5cdc994`](https://github.com/datajoint/element-array-ephys/commit/5cdc994f47387f7935c028a1fd38e59e7d4c31e3)) + +* handles a weird windows/unix path incompatibility (even with pathlib) ([`ba28637`](https://github.com/datajoint/element-array-ephys/commit/ba28637496fbea77207a81fae3e6a287c56b494a)) + +* Merge branch 'no-curation' of 
https://github.com/ttngu207/element-array-ephys into no-curation ([`a24bd1a`](https://github.com/datajoint/element-array-ephys/commit/a24bd1a700d7d8fed61c6c0f5e51c2482cbc5bbf)) + +* Apply suggestions from code review + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`149ef3f`](https://github.com/datajoint/element-array-ephys/commit/149ef3f4ab5294399d0553acec05b00b8d79226b)) + +* typo fix ([`9f98195`](https://github.com/datajoint/element-array-ephys/commit/9f981951d9222af132a51a85267a2c278f893f27)) + +* minor stylistic improvements ([`a9326ea`](https://github.com/datajoint/element-array-ephys/commit/a9326eaa015c46875829dbf92d6494aa5c0a3d85)) + +* remove `_required_packages_paths` ([`60105da`](https://github.com/datajoint/element-array-ephys/commit/60105da78fcc5e55be961c7c79fdb206b072de26)) + +* triggering Kilosort with ecephys_spike_sorting package ([`047bfa1`](https://github.com/datajoint/element-array-ephys/commit/047bfa1cd33fadefe42e9f395f119c97e894d1d1)) + +* automate ClusteringTask insertion ([`0d56456`](https://github.com/datajoint/element-array-ephys/commit/0d5645668f18d8b9011e46817bd0f31dda73a088)) + +* bugfix ([`a7e7554`](https://github.com/datajoint/element-array-ephys/commit/a7e755481215b67917b9678c5c795634db775c03)) + +* Update ephys_no_curation.py ([`70e93b3`](https://github.com/datajoint/element-array-ephys/commit/70e93b381c1f5fcb45b01a647b5be0fc4fbbbdf0)) + +* using `find_full_path` for session dir as well - improve robustness ([`5420ae0`](https://github.com/datajoint/element-array-ephys/commit/5420ae05ddf63c56e729f4229547fe5df99b3d58)) + +* no-curation, store processed data in user-specified `processed_data_dir` if provided ([`4397dd7`](https://github.com/datajoint/element-array-ephys/commit/4397dd7217b4595dc7fef498e6db456373ce50df)) + +* helper for `ProbeInsertion` - `auto_generate_entries(session_key)` ([`de84ce0`](https://github.com/datajoint/element-array-ephys/commit/de84ce0529c13eed5d1c1199062663f6a3888af2)) + +* improve kilosort 
loading routine - add `validate()` method ([`b7c0845`](https://github.com/datajoint/element-array-ephys/commit/b7c0845bc0514f3f435d79f6ba6fff86693b166a)) + +* minor bug fix ([`adfad95`](https://github.com/datajoint/element-array-ephys/commit/adfad9528d17689714d11b1ac1710d6f1a74756a)) + +* make `clustering_output_dir` user-input optional, auto infer ([`590310e`](https://github.com/datajoint/element-array-ephys/commit/590310ea8fc0829e60fd1113d99e83e75c78142d)) + +* remove `Curation` ([`a39a9b1`](https://github.com/datajoint/element-array-ephys/commit/a39a9b1b456c5b6fd49c2faa43f5d551e5f7901c)) + +* copied `ephys` to `ephys-no-curation`, added `recording_duration`, make ([`042cc46`](https://github.com/datajoint/element-array-ephys/commit/042cc460f48429e3b7c20eb01d49861d01357192)) + +* Update README ([`cdb9182`](https://github.com/datajoint/element-array-ephys/commit/cdb9182880dcca7f1070a2e2554d513488911cb2)) + +* Populated .md files ([`d8fca5b`](https://github.com/datajoint/element-array-ephys/commit/d8fca5bac61e8aacd3a24cffe8c6641e76512a05)) + +* Merge pull request #96 from ttngu207/main + +bugfix - remove % in attributes' comments AND add QC to `ephys_precluster` ([`4a6bc31`](https://github.com/datajoint/element-array-ephys/commit/4a6bc31e026de1869292e018efc5a817d01969e5)) + +* add QC to `ephys_precluster` ([`e21302f`](https://github.com/datajoint/element-array-ephys/commit/e21302f5a4c80b27aa4e66bf51e99f33153c4ebf)) + +* bugfix - remove `%` in attributes' comments ([`57a1c1d`](https://github.com/datajoint/element-array-ephys/commit/57a1c1d7d067f683e321656f669ad1c14be25fbe)) + +* Merge pull request #87 from ttngu207/main + +QC metrics ([`54c8413`](https://github.com/datajoint/element-array-ephys/commit/54c84137bbe55841b0e0db6079e0259d5240390f)) + +* Update CHANGELOG.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`7ce35ab`](https://github.com/datajoint/element-array-ephys/commit/7ce35ab8f47de621d5c0df3831c86c050a13b886)) + +* Apply suggestions from 
code review + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`100cf84`](https://github.com/datajoint/element-array-ephys/commit/100cf84cbb5a8e59ae98b57b63aa7ec789669a34)) + +* apply PR review's suggestions ([`0a94aa9`](https://github.com/datajoint/element-array-ephys/commit/0a94aa9b2519e8168b297bcb7f050ed3894774e3)) + +* Merge branch 'datajoint:main' into main ([`b126801`](https://github.com/datajoint/element-array-ephys/commit/b126801d6bc876465e47fcb10f7fc19aeb215183)) + +* bump version, add to CHANGELOG ([`c250857`](https://github.com/datajoint/element-array-ephys/commit/c25085792607a23021607d5c35bbde7d2890a32f)) + +* Apply suggestions from code review ([`35e8193`](https://github.com/datajoint/element-array-ephys/commit/35e8193fbc3d199fbf99932623bbafaa371fca12)) + +* code cleanup ([`7f948f7`](https://github.com/datajoint/element-array-ephys/commit/7f948f70e19b13f32d9e6ce6c178f34d6a5db665)) + +* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys ([`65554dc`](https://github.com/datajoint/element-array-ephys/commit/65554dc51486b1154c0f9bb844e6dfe18e2774cd)) + +* Merge branch 'datajoint:main' into main ([`d884f01`](https://github.com/datajoint/element-array-ephys/commit/d884f01349358c82a9cc15414b7ba660f9fa2bad)) + +* add QC metrics ([`1773e23`](https://github.com/datajoint/element-array-ephys/commit/1773e23329852c96c8ba92c96144ea07e2b07036)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`4ab2a6c`](https://github.com/datajoint/element-array-ephys/commit/4ab2a6c9544e653efc50380c6117ca8f30facc5e)) + +* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into main ([`b29b6b0`](https://github.com/datajoint/element-array-ephys/commit/b29b6b0ba0f715e00b378fbaf25e0af4e94373aa)) + +* Merge pull request #95 from guzman-raphael/docs + +Documentation revamp ([`efedd37`](https://github.com/datajoint/element-array-ephys/commit/efedd37c5bb4798ad7ba7975d0506490e440f9f6)) + +* Update with 
recent styling updates. ([`c73c7b5`](https://github.com/datajoint/element-array-ephys/commit/c73c7b59f3ad1103673e7e30b0becaa49f7c3a9e)) + +* Remove unneeded comment. ([`0a8a67c`](https://github.com/datajoint/element-array-ephys/commit/0a8a67cf9c44b54c06d16293d1f64c5da755d1a0)) + +* Upgrade documentation to latest design and automation. ([`a304d31`](https://github.com/datajoint/element-array-ephys/commit/a304d316d3fb0df4e186c68b9a20ffcfd4e80457)) + +* Create u24_element_release_call.yml ([`3b98067`](https://github.com/datajoint/element-array-ephys/commit/3b98067cbdd4a617f6c7de4b21ed475d953866dc)) + +* Create u24_element_tag_to_release.yml ([`ddd6a18`](https://github.com/datajoint/element-array-ephys/commit/ddd6a189bc787dcf4e8174cb10e1e762ff912598)) + +* Create u24_element_before_release.yml ([`ac9035d`](https://github.com/datajoint/element-array-ephys/commit/ac9035dfb6dea9d24aecf11487794b00ca0c8b12)) + +* Merge pull request #84 from A-Baji/documentation + +Create documentation template ([`22c95a8`](https://github.com/datajoint/element-array-ephys/commit/22c95a8dee0ac264ed990d8a785b8b1e25e4ef41)) + +* code review updates ([`87b4e40`](https://github.com/datajoint/element-array-ephys/commit/87b4e40c839f8e888d2c31a789c4297289ac4582)) + +* update deploy-docs wf condition ([`9ff6089`](https://github.com/datajoint/element-array-ephys/commit/9ff6089e361156cec3184709c5d72fee4582efc2)) + +* enable wf ([`9b56f01`](https://github.com/datajoint/element-array-ephys/commit/9b56f0144d0c1d9035e2be94cd52e24e11bab477)) + +* fix logos ([`4a61ff2`](https://github.com/datajoint/element-array-ephys/commit/4a61ff294ace685edc71618cf5c0bf034ad11e4d)) + +* enable wf ([`6e0b460`](https://github.com/datajoint/element-array-ephys/commit/6e0b460055d60bad9c68a649f07ee928d54d5dee)) + +* disable wf ([`25c4876`](https://github.com/datajoint/element-array-ephys/commit/25c4876a6ee0205fd6387eaeff88e71d28e23ded)) + +* add permalin, fix mobile color, update logo 
([`9ef29f2`](https://github.com/datajoint/element-array-ephys/commit/9ef29f26bfa8687d2f3c651e1368d50c1b27212f)) + +* github actions ([`3e3a943`](https://github.com/datajoint/element-array-ephys/commit/3e3a94353219989be282f70b68c22c53af4481b6)) + +* fix landing page nav link ([`fdbe144`](https://github.com/datajoint/element-array-ephys/commit/fdbe1441235894a4b21697555004c2b577af0ce1)) + +* open link in new tab example ([`d8a1607`](https://github.com/datajoint/element-array-ephys/commit/d8a1607fba5c3244185db51cb593314706f75eac)) + +* update override ([`7cfd746`](https://github.com/datajoint/element-array-ephys/commit/7cfd74643f751d38525f77d17524fd5664a85e10)) + +* fix navigation override ([`88c5ed1`](https://github.com/datajoint/element-array-ephys/commit/88c5ed1037e6d5a156e038383975e68d0ec8d18b)) + +* cleanup navigation layout ([`d0e7abb`](https://github.com/datajoint/element-array-ephys/commit/d0e7abbfe476aa45edecdb0c50573274ab44d0c6)) + +* add datajoint social link ([`052e589`](https://github.com/datajoint/element-array-ephys/commit/052e589ee403e0435b53d32f3c4f498b736f191c)) + +* change light mode footer color ([`111fcf5`](https://github.com/datajoint/element-array-ephys/commit/111fcf51899c4ee6602e308067a6a9e3f566d48b)) + +* re enable wfs ([`3c2226d`](https://github.com/datajoint/element-array-ephys/commit/3c2226d89f8aff13c0c5f0bef196944b80694ef0)) + +* disable wfs ([`7def7f3`](https://github.com/datajoint/element-array-ephys/commit/7def7f3364d1472a09fd9bec74a74eb2b7758d89)) + +* change dark theme ([`2dca304`](https://github.com/datajoint/element-array-ephys/commit/2dca304aacf1ba89f464703c3fed390438ad74fb)) + +* dark mode tweak ([`26a2818`](https://github.com/datajoint/element-array-ephys/commit/26a2818e4885750928761a2d2db0884e5bdad4ab)) + +* update source block dark mode color ([`0c889e2`](https://github.com/datajoint/element-array-ephys/commit/0c889e2e8a9183ce48cad4c22654239d92e7dba4)) + +* tweak docstring example 
([`a0e223a`](https://github.com/datajoint/element-array-ephys/commit/a0e223aec75c626d05786657b4f2b45b1b31a9d9)) + +* add social links to footer ([`656242e`](https://github.com/datajoint/element-array-ephys/commit/656242ebddfaccae726d664fa2b4e4c7b6603f0f)) + +* landing page nav link ([`b5b8bab`](https://github.com/datajoint/element-array-ephys/commit/b5b8bab6d362e37d796dfb5531a65d9cb35b0eb6)) + +* re enable wf ([`109965b`](https://github.com/datajoint/element-array-ephys/commit/109965b504047147203647815b1c87f07d043c62)) + +* disable other wf ([`a7b3d11`](https://github.com/datajoint/element-array-ephys/commit/a7b3d116c969875c20adcd1e33142f34e67f133e)) + +* apply suggestions from review ([`9f5753c`](https://github.com/datajoint/element-array-ephys/commit/9f5753c1deba2ea7ac21cdfb08be63031b2d258b)) + +* disable mike install ([`d1ee89b`](https://github.com/datajoint/element-array-ephys/commit/d1ee89b8e097a521d45647d7413c1c635b1ed6ae)) + +* re enable other gh action wf ([`9b70fa1`](https://github.com/datajoint/element-array-ephys/commit/9b70fa1d7103b340dda95d9b3b21da7c8a540679)) + +* disable other gh action wf ([`f1b1868`](https://github.com/datajoint/element-array-ephys/commit/f1b186835cb0bf1a7e98f731efb3ff42d336c4b0)) + +* move docker files to docs/ ([`33d6fcc`](https://github.com/datajoint/element-array-ephys/commit/33d6fcc39b52e7da1e698945fbd991bf8660f434)) + +* comment cleanup ([`5a936af`](https://github.com/datajoint/element-array-ephys/commit/5a936afd88b006948fb89cc39e625429139e3db2)) + +* add mike workflow example ([`03b6dc3`](https://github.com/datajoint/element-array-ephys/commit/03b6dc395c8168aac8f616daf59971c38872685d)) + +* add mike for future use ([`6f7eedf`](https://github.com/datajoint/element-array-ephys/commit/6f7eedf4ca16b065e6e7b2e3ec6c061ba6b8608e)) + +* re enable other wf jobs ([`9bdb418`](https://github.com/datajoint/element-array-ephys/commit/9bdb4187d8a9c72f8c414147a37875200d72b2cc)) + +* add missing dependencies 
([`5c44e80`](https://github.com/datajoint/element-array-ephys/commit/5c44e8018882d66b7f73c32ecac7fe6202737ed0)) + +* add config file path ([`00d3ca2`](https://github.com/datajoint/element-array-ephys/commit/00d3ca22e2fa6634fb62ac94c5958c233a218528)) + +* disable other jobs ([`5469e77`](https://github.com/datajoint/element-array-ephys/commit/5469e77a48483c4870c42721c89802d536044ef7)) + +* docker and github wf ([`730614b`](https://github.com/datajoint/element-array-ephys/commit/730614b8feb1b966e2150813ad9339f13a1a9bff)) + +* small change ([`f40c188`](https://github.com/datajoint/element-array-ephys/commit/f40c1887bbcc1b6e1111258d35659709eb7f4cc8)) + +* move docs to src ([`33224a9`](https://github.com/datajoint/element-array-ephys/commit/33224a9087ad988fb0b4429deca4d385017d3043)) + +* cleanup ([`4399034`](https://github.com/datajoint/element-array-ephys/commit/4399034e9ae66ae2cbcd6df998f0c65466147c16)) + +* clean up and tweak dark mode theme ([`72e3aa6`](https://github.com/datajoint/element-array-ephys/commit/72e3aa6c29646351a602908696b84b7b852d31d7)) + +* tweak dark mode theme for codeblocks ([`95e3925`](https://github.com/datajoint/element-array-ephys/commit/95e3925d85a1d1da5122bba160c89afe6c70f49c)) + +* docstring example ([`c4d3bde`](https://github.com/datajoint/element-array-ephys/commit/c4d3bde2fb9c4e4bb5f9981468c360808428fe0a)) + +* light and dark themes ([`b1f7399`](https://github.com/datajoint/element-array-ephys/commit/b1f7399984bc3f3c1707b43723c342ca6b8cd42e)) + +* dj light theme ([`724d870`](https://github.com/datajoint/element-array-ephys/commit/724d870489c9346044bc305065622dee04a05f0e)) + +* set up mkdocs ([`f2a5e7c`](https://github.com/datajoint/element-array-ephys/commit/f2a5e7c0bcd7c85f83a535a53a739cda4e81e026)) + +## v0.1.4 (2022-07-11) + +### Unknown + +* Merge pull request #83 from kabilar/main + +Fix for `spike_depths` attribute ([`ee0e179`](https://github.com/datajoint/element-array-ephys/commit/ee0e179d0ed02212f03a36382a07409b6ba2f823)) + +* 
Update changelog ([`a97dd3c`](https://github.com/datajoint/element-array-ephys/commit/a97dd3c4fd468832721d6bedc3b796a89f01b3b9)) + +* Fix if statement ([`c66ff8f`](https://github.com/datajoint/element-array-ephys/commit/c66ff8f7311768f6916b859319570a4d267a423f)) + +* Update changelog ([`0da5e91`](https://github.com/datajoint/element-array-ephys/commit/0da5e915a97f6c9b1ed5d5f8c8b7c551def38440)) + +* Update changelog and version ([`1865be6`](https://github.com/datajoint/element-array-ephys/commit/1865be641b4a62b87acb8b5a68c0ceb8914aede8)) + +* Fix for truth value of array ([`787d33d`](https://github.com/datajoint/element-array-ephys/commit/787d33d6ce478976a2c2e49d72fe11be90b5782f)) + +## v0.1.3 (2022-06-16) + +### Unknown + +* Merge pull request #79 from kabilar/main + +Update `precluster_output_dir` to nullable ([`ecd6a4c`](https://github.com/datajoint/element-array-ephys/commit/ecd6a4c0212ebb54dd1d256a384aa0b8bf7785f7)) + +* Set precluster_output_dir to nullable ([`90f3ed1`](https://github.com/datajoint/element-array-ephys/commit/90f3ed177587dc364e9b2548afb515809b549ec8)) + +## v0.1.2 (2022-06-09) + +### Unknown + +* Merge pull request #78 from kabilar/main + +Fix for case where `pc_features.npy` does not exist ([`a01530c`](https://github.com/datajoint/element-array-ephys/commit/a01530ca2216787f2b69906f596a4b785323cf50)) + +* Fix format ([`6b6f448`](https://github.com/datajoint/element-array-ephys/commit/6b6f448c9b3ca88d3106b37bb5a7bb474ce4d157)) + +* Update element_array_ephys/ephys_chronic.py ([`558e0b9`](https://github.com/datajoint/element-array-ephys/commit/558e0b94537e0d3b3c9c3d83823e9ff8a9212c57)) + +* Update element_array_ephys/ephys_acute.py ([`44dbe8c`](https://github.com/datajoint/element-array-ephys/commit/44dbe8cc84bf009bee1abdf22adc118ee6564457)) + +* Update element_array_ephys/readers/kilosort.py + +Co-authored-by: Thinh Nguyen <thinh@vathes.com> 
([`a392e57`](https://github.com/datajoint/element-array-ephys/commit/a392e57868ec9d9b356cf3c1a6e57b0dc33fbb1b)) + +* Update element_array_ephys/ephys_precluster.py + +Co-authored-by: Thinh Nguyen <thinh@vathes.com> ([`b3922fc`](https://github.com/datajoint/element-array-ephys/commit/b3922fc58e213b52ca0481c45ca0bcc7a01d1e1c)) + +* Update version and changelog ([`3a2671a`](https://github.com/datajoint/element-array-ephys/commit/3a2671a1b4d4dff344ac3431357482a4ce5c270c)) + +* Handle case where pc_features does not exist ([`c16fda2`](https://github.com/datajoint/element-array-ephys/commit/c16fda209410974116eea0bc893eb8542ca2afa0)) + +* Flatten channel map ([`cdce624`](https://github.com/datajoint/element-array-ephys/commit/cdce624300b20272d5662f0fdb7ec20d436148e1)) + +* Handle case where pc_features does not exist ([`c428e47`](https://github.com/datajoint/element-array-ephys/commit/c428e47c17a69fd0812cf4ad224db0ccff0ca036)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`d53f7a9`](https://github.com/datajoint/element-array-ephys/commit/d53f7a9228dc03c86c0ccb8392f39bc8d67d3b40)) + +## v0.1.1 (2022-06-01) + +### Unknown + +* Merge pull request #72 from kabilar/main + +Add attributes to describe parameter list ([`a20ab9b`](https://github.com/datajoint/element-array-ephys/commit/a20ab9b7879e8cf4131206bf3dbb099d557b0233)) + +* Merge branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`d618c55`](https://github.com/datajoint/element-array-ephys/commit/d618c5577f951791cc646ba6617847058b65516f)) + +* Update CHANGELOG.md ([`81e1643`](https://github.com/datajoint/element-array-ephys/commit/81e164399caf1ee14141890a02c904d23530d6d5)) + +* Update element_array_ephys/ephys_precluster.py ([`34a544e`](https://github.com/datajoint/element-array-ephys/commit/34a544e08509edf46962b0cf0c3753477100e732)) + +* Set spike_depths as nullable attribute 
([`4142468`](https://github.com/datajoint/element-array-ephys/commit/4142468ba4f83fd95da6a6f988e95f97b5907555)) + +* Update length ([`2ce12c1`](https://github.com/datajoint/element-array-ephys/commit/2ce12c1cfed6e4e983dd19cfcb5f0d021f49c845)) + +* Update diff ([`8366b60`](https://github.com/datajoint/element-array-ephys/commit/8366b60c641b15af5aa40720248564fd606e4bd5)) + +* Update version and changelog ([`ad9a4b9`](https://github.com/datajoint/element-array-ephys/commit/ad9a4b97d741a72fc18c093c5806f9a732dac54b)) + +* Add description attribute ([`08fb06a`](https://github.com/datajoint/element-array-ephys/commit/08fb06af3ad45be1c8dcf1576303d3c085f8593e)) + +* Merge pull request #65 from kabilar/main + +Add `ephys_precluster` module ([`3eeae51`](https://github.com/datajoint/element-array-ephys/commit/3eeae51bd34570c95dcec945eb1b55771edeb902)) + +* Add ephys_chronic image ([`c82c23b`](https://github.com/datajoint/element-array-ephys/commit/c82c23b72e169b0bb5b2cc3057b5af6323e052d6)) + +* Add precluster image ([`a31abba`](https://github.com/datajoint/element-array-ephys/commit/a31abba2ef5f5bf8d49449dda4a20286765d8a87)) + +* Raise error ([`92e30ee`](https://github.com/datajoint/element-array-ephys/commit/92e30ee03194ea5cf1eeba076c324f73c9b7ebf6)) + +* Merge branch 'main' of kabilar/element-array-ephys ([`cd31e0b`](https://github.com/datajoint/element-array-ephys/commit/cd31e0b292ffda20cc0d0f6096591fc7e1350329)) + +* Update element_array_ephys/ephys_precluster.py ([`5bbb727`](https://github.com/datajoint/element-array-ephys/commit/5bbb727d4ccccdab1add41c21ad505fddde94ebe)) + +* Update name ([`3df0981`](https://github.com/datajoint/element-array-ephys/commit/3df0981f7011d6871bf3772f4ba98917e89cf80b)) + +* Update changelog ([`bbe9f3f`](https://github.com/datajoint/element-array-ephys/commit/bbe9f3f445efb35f287405469b6226d0bd4a2f7e)) + +* Update version ([`44c86bf`](https://github.com/datajoint/element-array-ephys/commit/44c86bfdf665f71946375775d5b1ac12323b08d5)) + +* Merge 
branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`2bd2234`](https://github.com/datajoint/element-array-ephys/commit/2bd2234030f477205108054ec70dc195e5ebae8c)) + +* Update element_array_ephys/ephys_precluster.py ([`dc0fc1f`](https://github.com/datajoint/element-array-ephys/commit/dc0fc1f50aa1d950d89cb8b355e992a3fdcb3125)) + +* Update element_array_ephys/ephys_precluster.py ([`f2baf12`](https://github.com/datajoint/element-array-ephys/commit/f2baf12fe2d9a6417f2d63674563570bb72453af)) + +* Update element_array_ephys/ephys_precluster.py ([`ec0ebf2`](https://github.com/datajoint/element-array-ephys/commit/ec0ebf206dcfff530b0d9f3c2c7dfa50ef1d66f3)) + +* Update element_array_ephys/ephys_precluster.py ([`8d793ac`](https://github.com/datajoint/element-array-ephys/commit/8d793ac82d3be366d4a07900c399d39210ff2ad0)) + +* Add documentation for ephys modules ([`644a114`](https://github.com/datajoint/element-array-ephys/commit/644a114b72e8ae4efc332291721d3987aa22a007)) + +* Rename image ([`91950b0`](https://github.com/datajoint/element-array-ephys/commit/91950b0b1d88d92563ec5ee02245602e0c022480)) + +* Merge 'main' of datajoint/element-array-ephys ([`1b60995`](https://github.com/datajoint/element-array-ephys/commit/1b60995453afae074d2b32d33dabbf06044b1dad)) + +* Merge pull request #44 from bendichter/convert_to_nwb + +Convert to nwb ([`7a4fba9`](https://github.com/datajoint/element-array-ephys/commit/7a4fba9ba51d6ee1cf21bb7eaf87a59b4accfd44)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`9ee6088`](https://github.com/datajoint/element-array-ephys/commit/9ee60885e638e85746a093c791a848dbd37f2472)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`441cfe2`](https://github.com/datajoint/element-array-ephys/commit/441cfe2e2ab00765ad9603d28f0bd8a50d48d1d1)) + +* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb 
([`6fc51b0`](https://github.com/datajoint/element-array-ephys/commit/6fc51b055ef77069f04bc72cf7999d8e0c6717b0)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`9f9872c`](https://github.com/datajoint/element-array-ephys/commit/9f9872c37eb325b441703d4204889f6298d1ba4e)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`fb98327`](https://github.com/datajoint/element-array-ephys/commit/fb983274e8294fbee5703e6ccaaa3d46ad1394b4)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`d94453b`](https://github.com/datajoint/element-array-ephys/commit/d94453b1e7704ef41f830ae4c7e06f569a37f545)) + +* remove ephys_no_curation.py ([`3e07c61`](https://github.com/datajoint/element-array-ephys/commit/3e07c61b7556fffea3c7daa7409f70f51541e76e)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`59028cb`](https://github.com/datajoint/element-array-ephys/commit/59028cb22c1ee533d4635d677eea897a933cbf71)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`068ea3d`](https://github.com/datajoint/element-array-ephys/commit/068ea3d3682cc00d988807e804744102f8c0e359)) + +* trying clustering_query..proj() ([`c2004ea`](https://github.com/datajoint/element-array-ephys/commit/c2004eabf06822d563e06515e50476ac76ec3610)) + +* rmv units_query ([`0152c5d`](https://github.com/datajoint/element-array-ephys/commit/0152c5d97dd6b40119312f50da62f31e476dbad4)) + +* fix insertion record ([`82c8655`](https://github.com/datajoint/element-array-ephys/commit/82c86559e86e64580b740325741a5f62e6cf037f)) + +* add explanation for index parameter ([`707adff`](https://github.com/datajoint/element-array-ephys/commit/707adff4e6fbb451e0582115b85b72b36002ba9e)) + +* Merge remote-tracking branch 'origin/convert_to_nwb' into 
convert_to_nwb ([`bc54009`](https://github.com/datajoint/element-array-ephys/commit/bc54009c7520d29d13b2a5c5f3def3e8888502c6)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`842beec`](https://github.com/datajoint/element-array-ephys/commit/842beec80ed5d846aed29a48575db1de1457bdf9)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`766f4eb`](https://github.com/datajoint/element-array-ephys/commit/766f4eb0962cf36e10290cc1ff4dfb27ad74de87)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2526812`](https://github.com/datajoint/element-array-ephys/commit/2526812f664e7e58a9db368e293dc8e7927615d7)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`15044cf`](https://github.com/datajoint/element-array-ephys/commit/15044cf4c85ae67f9e2a73031980180380a0d974)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2e621c0`](https://github.com/datajoint/element-array-ephys/commit/2e621c02a4809e1c5a540ddc7e22c07f5fcdec1b)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`a01ee9c`](https://github.com/datajoint/element-array-ephys/commit/a01ee9ca8e277265382adb674b707ba67f173c01)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`cbefcde`](https://github.com/datajoint/element-array-ephys/commit/cbefcde9f81de6be64eeb31a04631fbb00fe6431)) + +* add explanation in docstring of add_ephys_units_to_nwb ([`3971fe6`](https://github.com/datajoint/element-array-ephys/commit/3971fe6632030d789ab6b159936d3f4dc2f5f878)) + +* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb 
([`a7b2abb`](https://github.com/datajoint/element-array-ephys/commit/a7b2abb99baf7295e4549ba5eb0edf7afb9acb63)) + +* Update element_array_ephys/export/nwb/nwb.py ([`c200699`](https://github.com/datajoint/element-array-ephys/commit/c200699f35d8f90be07f4a8fb194e33b504d9f78)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2ee2bd5`](https://github.com/datajoint/element-array-ephys/commit/2ee2bd544ed777c537dc18b6421b4764462f3482)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`9e72773`](https://github.com/datajoint/element-array-ephys/commit/9e72773d701550b9c613dfb208f6543ce227e803)) + +* Merge branch 'main' into convert_to_nwb ([`b3779e5`](https://github.com/datajoint/element-array-ephys/commit/b3779e58a0b5f2be73abfc07c5425272089a9250)) + +* fix docstring for get_electrodes_mapping ([`acdb5f9`](https://github.com/datajoint/element-array-ephys/commit/acdb5f9d25c4f1c1f6e89dd37fd8e1697327a8e9)) + +* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb ([`826335b`](https://github.com/datajoint/element-array-ephys/commit/826335be00a09481b17b06ce6901c152490f301e)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`49fac08`](https://github.com/datajoint/element-array-ephys/commit/49fac083d5de5c853acb8833460edaa07132638e)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`ba6cbcf`](https://github.com/datajoint/element-array-ephys/commit/ba6cbcf1c3f03cd6a68b96a5b10e28398d9d60e9)) + +* specify releases for dependencies ([`daccfc4`](https://github.com/datajoint/element-array-ephys/commit/daccfc4dd7a48142739dfee5b00a3ee7c9624d19)) + +* add docstring for gain_helper ([`12974ff`](https://github.com/datajoint/element-array-ephys/commit/12974ff0df04fb8dc15b650e5cbbd3b51aa6340f)) + +* Update 
element_array_ephys/export/nwb/README.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`00c3691`](https://github.com/datajoint/element-array-ephys/commit/00c369144a50cc34acb0fdab12ea0ac93b6627a4)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`abccafa`](https://github.com/datajoint/element-array-ephys/commit/abccafa522f930f099eefd49c14e69eb95c56067)) + +* Update element_array_ephys/export/nwb/README.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`24ac2c0`](https://github.com/datajoint/element-array-ephys/commit/24ac2c038b3e7403a3130e43a385c1bab0acb8f5)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`a1fb193`](https://github.com/datajoint/element-array-ephys/commit/a1fb1934cb34b1ebedb11a10f514cb1ce24b9e00)) + +* Update element_array_ephys/export/nwb/README.md + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`584c738`](https://github.com/datajoint/element-array-ephys/commit/584c738982b53d5988e338302e72f7967b43fe2d)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`c68f2ca`](https://github.com/datajoint/element-array-ephys/commit/c68f2cabfe9d362d4a176a80eb8b87f068f8e317)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`3ae6e2d`](https://github.com/datajoint/element-array-ephys/commit/3ae6e2d0f96ac74d77cf5617719abc638c1fba66)) + +* Update element_array_ephys/ephys_acute.py + +Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`e973743`](https://github.com/datajoint/element-array-ephys/commit/e9737435f55f8b3ef48c10ee691c0bfd37dd7e21)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`b373b26`](https://github.com/datajoint/element-array-ephys/commit/b373b264080e36e379e775499a1b3166c004b546)) + +* fix 
imports ([`cc14671`](https://github.com/datajoint/element-array-ephys/commit/cc146716036cf8dba74a241160f0b67ae2c552bd)) + +* rmv tests (they are moved to the ephys workflow) ([`c63c8c4`](https://github.com/datajoint/element-array-ephys/commit/c63c8c42fc640319d4cfc4acf90e1b1595e63e3b)) + +* Merge remote-tracking branch 'cbroz1/ben' into convert_to_nwb + +# Conflicts: +# element_array_ephys/export/nwb/nwb.py ([`5b942b3`](https://github.com/datajoint/element-array-ephys/commit/5b942b33d1e4611367f962c70ec571b3d99f2143)) + +* adjust imports in __init__ and nwb.py ([`782a4a5`](https://github.com/datajoint/element-array-ephys/commit/782a4a5ee0cb7c6e1e382c2985dce45c418c7c19)) + +* Merge pull request #2 from ttngu207/chris-nwb + +import the correct ephys module that has been activated ([`2beb41b`](https://github.com/datajoint/element-array-ephys/commit/2beb41b9af183468846a0c03b7de8ab740f855c4)) + +* import the correct ephys module that has been activated ([`b4ffe1d`](https://github.com/datajoint/element-array-ephys/commit/b4ffe1d68fe068a5bd233ad8c9adb3bda09cf145)) + +* Merge pull request #1 from ttngu207/chris-nwb + +nwb function specification in linking_module ([`6d7ad7c`](https://github.com/datajoint/element-array-ephys/commit/6d7ad7c131037b882c1cf7a1183b5438e4635dbd)) + +* nwb function specification in linking_module ([`bf5e82a`](https://github.com/datajoint/element-array-ephys/commit/bf5e82aa63d771151ddd7348d5d37832cad8ac9d)) + +* Avoid linking_module issues. See details. + +- add __init__ + schema from git/ttngu207/element_array_ephys@no-curation +- add ephys_no_curation schema to match the schema ben has been pulling from + - how should we address this in the element? 
given not currently default +- remove unused imports +- import datajoint and element_interface.utils find_full_path +- add arguments to main export function: + - schema names as datajoint config database prefix default + - ephys_root_data_dir - default to dj.config or none +- add create_virtual_module statements to avoid activate(schema,linking_module=unknown) +- declare ephys and probe as global +- add assert errors for ephys_root_data_dir!=None when needed +- pass ephys_root_data_dir to relevant functions +- above permits: from element_array_ephys.export.nwb.nwb import ecephys_session_to_nwb ([`2595dae`](https://github.com/datajoint/element-array-ephys/commit/2595daee613f1b080b6bc4a6743865e8f9b42dc7)) + +* Rebase, squashed. See Details + +Add element_data_loader for multiple root dirs +Update author +Fix import +Fix OpenEphys session path +Update directory path +Add print statement +Fix for missing `fileTimeSecs` +Update error message +Suggested adds re upstream components +Directing to workflow for upstream `SkullReference` and utility functions ([`a557b17`](https://github.com/datajoint/element-array-ephys/commit/a557b17caaa0d779f91bee105a17fb1418121e00)) + +* rmv subject_id ([`806684f`](https://github.com/datajoint/element-array-ephys/commit/806684f4534c11982e78d50264f47257c3c3018f)) + +* import GenericDataChunkIterator from hdmf ([`a7f4624`](https://github.com/datajoint/element-array-ephys/commit/a7f46242d849a65149cb7207a87ec489402f7452)) + +* add tests for getting lfp data from datajoint ([`fafdde1`](https://github.com/datajoint/element-array-ephys/commit/fafdde1257aafd00b3c47739be8a73a4c7f09087)) + +* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb ([`63b545d`](https://github.com/datajoint/element-array-ephys/commit/63b545dec4d788132bf28832d1156488d478e47e)) + +* Update element_array_ephys/export/nwb/nwb.py + +Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> 
([`9374d94`](https://github.com/datajoint/element-array-ephys/commit/9374d94d1372f6f964ecb3bd628c5f97c18c261b)) + +* Merge branch 'main' into convert_to_nwb ([`f8027cc`](https://github.com/datajoint/element-array-ephys/commit/f8027ccedbae3418f3cd8e54d6a0197aeea41c65)) + +* update import path ([`6338daf`](https://github.com/datajoint/element-array-ephys/commit/6338dafd0a00c2eb862726977cf6c2470a8c7b1a)) + +* add tests ([`4cab8c8`](https://github.com/datajoint/element-array-ephys/commit/4cab8c86620094e912e7193c3a579989046d26b1)) + +* refactor into gains_helper ([`7953662`](https://github.com/datajoint/element-array-ephys/commit/79536621e8b6b9f35ed4cb61e5384c433da28bc0)) + +* correctly set conversion and channel_conversion ([`7deb00f`](https://github.com/datajoint/element-array-ephys/commit/7deb00f8e360c5b23e8b63333c55b50ece5334c3)) + +* ephys.find_full_path ([`654d567`](https://github.com/datajoint/element-array-ephys/commit/654d567f8701940bdec6c426d65cb619c3fa2016)) + +* import os ([`ba3f86a`](https://github.com/datajoint/element-array-ephys/commit/ba3f86a2a956e26beb7223cbf09dc779de894a8b)) + +* standardize slashes ([`8d3df71`](https://github.com/datajoint/element-array-ephys/commit/8d3df711fc6f79282e81ac2794c2ca0b5d19c10c)) + +* ephys.get_ephys_root_data_dir ([`e992478`](https://github.com/datajoint/element-array-ephys/commit/e99247874924a39596510fb144f3272f00886804)) + +* import probe ([`379ae11`](https://github.com/datajoint/element-array-ephys/commit/379ae11c718242d04cc6c2eb62a76a433d2b94cf)) + +* import session_to_nwb ([`1dccb68`](https://github.com/datajoint/element-array-ephys/commit/1dccb6893fa8a77408e598f278995998b5c8b45c)) + +* import from workflow pipeline ([`5de22e0`](https://github.com/datajoint/element-array-ephys/commit/5de22e06fa2db7b6e2005bbc094b05fc3581e850)) + +* import from workflow pipeline ([`1b67629`](https://github.com/datajoint/element-array-ephys/commit/1b676293227e03e77293c223f8d3f336c832ee59)) + +* fix nwbfile_kwargs logic 
([`eb47ee5`](https://github.com/datajoint/element-array-ephys/commit/eb47ee506db1fd1929da2d557c17776c28675326)) + +* fix nwbfile_kwargs logic ([`96c57f7`](https://github.com/datajoint/element-array-ephys/commit/96c57f756243eb00ced7e157e417ab1af5881c10)) + +* add optional session keys to ecephys_session_to_nwb ([`365b43b`](https://github.com/datajoint/element-array-ephys/commit/365b43b0fa584a531b386afe74eb55e853773a2d)) + +* add datetime import ([`d1f3dab`](https://github.com/datajoint/element-array-ephys/commit/d1f3daba676385a54aa108bb554bce008594b96d)) + +* relative import of ephys ([`1363214`](https://github.com/datajoint/element-array-ephys/commit/13632147a56826fe7ccba065aa02e370950a72c0)) + +* refactor to include requirements for nwb conversion ([`013ae7d`](https://github.com/datajoint/element-array-ephys/commit/013ae7da1bc94ed2c6c5e87b79378e121826882b)) + +* add readme for exporting to NWB ([`19f78b7`](https://github.com/datajoint/element-array-ephys/commit/19f78b78ef4467f1edd7b3270a8df51cdf349a91)) + +* add some docstrings ([`b35ee72`](https://github.com/datajoint/element-array-ephys/commit/b35ee723dc39777dfede2334452021a5c234f6d8)) + +* add missing nwbfile arg ([`a7f846b`](https://github.com/datajoint/element-array-ephys/commit/a7f846be82b020c5b6ace34b524dd99c5438d345)) + +* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb + +# Conflicts: +# element_array_ephys/export/nwb.py ([`b4d9c0e`](https://github.com/datajoint/element-array-ephys/commit/b4d9c0edd791f4f846b99c6bcc98ee688bbb508c)) + +* Update element_array_ephys/export/nwb.py + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`e837452`](https://github.com/datajoint/element-array-ephys/commit/e83745247b09b369f6992590446c7167c32bddb7)) + +* Update element_array_ephys/export/nwb.py + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`3298341`](https://github.com/datajoint/element-array-ephys/commit/3298341553063a8e2d165c25cfa18c0050047cde)) + +* Update 
element_array_ephys/export/nwb.py + +Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`470b20a`](https://github.com/datajoint/element-array-ephys/commit/470b20aa3914fb8c0bed943c7d2f82c0df5dd4fd)) + +* * optimize imports +* black +* upgrade to latest version of conversion-tools +* upgrade to latest spikeinterface api +* ([`d924d57`](https://github.com/datajoint/element-array-ephys/commit/d924d57ea62ec24c791135a16f3ed397493084d5)) + +* add documentation ([`d2b93f2`](https://github.com/datajoint/element-array-ephys/commit/d2b93f2054c51848a20d1a8da44e3cc7a9586973)) + +* * add lfp from source +* docstrings +* json dump insertion into location +* ignore channel_conversion if all are 1 +* black formatting ([`8d75a61`](https://github.com/datajoint/element-array-ephys/commit/8d75a61ac29b0d51381104472a8570db6c99bcb5)) + +* add draft convert to nwb ([`f3dd8d8`](https://github.com/datajoint/element-array-ephys/commit/f3dd8d80c0a0b9a4e16734a35cd0e2f4520a0142)) + +* Mult rootdirs. If sess_dir, check fullpath. Give OpenEphys fullpath. 
([`720eb00`](https://github.com/datajoint/element-array-ephys/commit/720eb00e60c6df928f4e6cbe938d4db625feab58)) + +* Add Part table to track order of operations ([`3d8ec16`](https://github.com/datajoint/element-array-ephys/commit/3d8ec16a85507d4708ceedfdcffd4609546dc0f7)) + +* Merge branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`fee5b7c`](https://github.com/datajoint/element-array-ephys/commit/fee5b7ca39b564985ad5907dcbdfd58f719beeec)) + +* Merge pull request #1 from kabilar/precluster + +Merge to main ([`1e92b04`](https://github.com/datajoint/element-array-ephys/commit/1e92b048c2da3dd50916df1825bf8e477de9bb05)) + +* Add pre-clustering tables to acute module ([`c7a155a`](https://github.com/datajoint/element-array-ephys/commit/c7a155a9d4ab384d667046fefb316df7d00ed656)) + +* Create copy of `ephys_acute.py` ([`122d9a2`](https://github.com/datajoint/element-array-ephys/commit/122d9a2a598263340d1dcdc49353ac3916ffc3b7)) + +* Merge pull request #64 from kabilar/main + +Update README ([`14517a2`](https://github.com/datajoint/element-array-ephys/commit/14517a27288cf2a2ddfd526da64a2d32da69b156)) + +* Replace italics with back tick ([`b3f5b29`](https://github.com/datajoint/element-array-ephys/commit/b3f5b295ed233aedad5db1f872be18c2df644951)) + +* Replace italics with back tick ([`e8350d5`](https://github.com/datajoint/element-array-ephys/commit/e8350d5bd5e02a7150c7c5c2589daef8aaaa54a8)) + +* Add ephys schema text ([`112f325`](https://github.com/datajoint/element-array-ephys/commit/112f325d2bdc2e940756531282ac38f5e67bc67d)) + +* Add activation text ([`edc9d5d`](https://github.com/datajoint/element-array-ephys/commit/edc9d5dd3c972ea83042b3f844d5011d2602d1c3)) + +* Add ephys schema text ([`ab92d84`](https://github.com/datajoint/element-array-ephys/commit/ab92d847e2ec39d7f0d6dc851ac6ef26bfaf7bcb)) + +* Revert "Create copy of `ephys_acute.py`" + +This reverts commit b66109b5e61297a10c1cc8a929115fa5955238e1. 
([`000308f`](https://github.com/datajoint/element-array-ephys/commit/000308f7261794e6acd39762e524c27331bb1a0a)) + +* Add probe schema text ([`4f8699f`](https://github.com/datajoint/element-array-ephys/commit/4f8699fa5859e4a873c5dc32fbf4fae64e5073af)) + +* Create copy of `ephys_acute.py` ([`c7393fc`](https://github.com/datajoint/element-array-ephys/commit/c7393fcb4933846b6862548a47c29cadc4d97801)) + +* Revert "Create copy of `ephys_acute.py`" + +This reverts commit b66109b5e61297a10c1cc8a929115fa5955238e1. ([`9ddfb4c`](https://github.com/datajoint/element-array-ephys/commit/9ddfb4c927cca7a8de40965752d7eac2e06bd07d)) + +* Update format ([`f940a71`](https://github.com/datajoint/element-array-ephys/commit/f940a719d9c409eb40b760f8513ba9737b5cf809)) + +* Add collapsible sections ([`338a796`](https://github.com/datajoint/element-array-ephys/commit/338a796ff2ca3cddf460309c6776034f6409aeed)) + +* Update format ([`9b68b03`](https://github.com/datajoint/element-array-ephys/commit/9b68b0332d1cd228327fea70459b1383b77b4473)) + +* Add collapsible section ([`8740b2c`](https://github.com/datajoint/element-array-ephys/commit/8740b2ca2039a1a4d4fd7ab9a2effcabfbc9d7d6)) + +* Add citation section ([`c8ac8e6`](https://github.com/datajoint/element-array-ephys/commit/c8ac8e656ddd374d3855fc22241c304624d925dd)) + +* Add links to elements.datajoint.org ([`0bc69c2`](https://github.com/datajoint/element-array-ephys/commit/0bc69c217cc679a917de97587f5d2bd205e7a41a)) + +* Add link to elements.datajoint.org ([`d11af2f`](https://github.com/datajoint/element-array-ephys/commit/d11af2f30659b983b936b922264e6d583ce4bc86)) + +* Move background file to datajoint-elements repo ([`68c94c1`](https://github.com/datajoint/element-array-ephys/commit/68c94c19884dcc41f998852537e97d8b231a7ccf)) + +* Create copy of `ephys_acute.py` ([`b66109b`](https://github.com/datajoint/element-array-ephys/commit/b66109b5e61297a10c1cc8a929115fa5955238e1)) + +* Move background file to datajoint-elements repo 
([`f8a3abf`](https://github.com/datajoint/element-array-ephys/commit/f8a3abfa0af6abffec43654b0c75ae32c45c71c3)) + +* Merge pull request #58 from kabilar/main + +Add attributes and rename module ([`1f7a2a3`](https://github.com/datajoint/element-array-ephys/commit/1f7a2a36fe0162c0ba6241f18ff34338b323e854)) + +* Ensure backwards compatibility ([`fc38bb5`](https://github.com/datajoint/element-array-ephys/commit/fc38bb588f03fe6971bd269a19498b50b7c2d6c7)) + +* Update string formatting ([`0d56f2e`](https://github.com/datajoint/element-array-ephys/commit/0d56f2e62be96fd0a94b6a43142c0cb620b16fb7)) + +* Rename module `ephys` to `ephys_acute` ([`1104ab4`](https://github.com/datajoint/element-array-ephys/commit/1104ab4b8820c883857e7412374c20bbc2b27689)) + +* Add recording metadata ([`65b9ece`](https://github.com/datajoint/element-array-ephys/commit/65b9ece60e0ec988ed201b9d4d036e1c1535fb7a)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`b13995e`](https://github.com/datajoint/element-array-ephys/commit/b13995e230732f8e6a3b957b1388d0d82e79a274)) + +* Merge pull request #35 from kabilar/main + +Implement `find_full_path` within `ephys` modules ([`088093d`](https://github.com/datajoint/element-array-ephys/commit/088093d068c444311940a195f4b7408bbf0db429)) + +* Increase datatype size ([`6b17239`](https://github.com/datajoint/element-array-ephys/commit/6b1723940edf595329235c4805bfcc5b1b6544a8)) + +* Rename package ([`6f9507c`](https://github.com/datajoint/element-array-ephys/commit/6f9507c3d752f5bd010e1cf24a7369060d77b8d7)) + +* Merge branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`ce6adf1`](https://github.com/datajoint/element-array-ephys/commit/ce6adf1c6a7409ecd98907f3a3a5c6d50f0c10bb)) + +* Suggested adds re upstream components + +Directing to workflow for upstream `SkullReference` and utility functions 
([`4ca9b32`](https://github.com/datajoint/element-array-ephys/commit/4ca9b328f705d9363dd70a88ad857c5994f65d77)) + +* Update error message ([`09e8a96`](https://github.com/datajoint/element-array-ephys/commit/09e8a96504898f7840b031d09ce9346b639b600f)) + +* Remove print statement ([`1a4a7f5`](https://github.com/datajoint/element-array-ephys/commit/1a4a7f5c0834f62a64a6508de0d0b5de148a4657)) + +* [WIP] Add print statement ([`84bb616`](https://github.com/datajoint/element-array-ephys/commit/84bb6169c98fedfea50418a10c31e870b1e8913f)) + +* Fix for missing `fileTimeSecs` ([`665cc28`](https://github.com/datajoint/element-array-ephys/commit/665cc287b5b84cfe961bca3e47c9ff407483a2b9)) + +* Update module import ([`818cc53`](https://github.com/datajoint/element-array-ephys/commit/818cc53edb5395a1cc845958a373365679174f22)) + +* Fixed doc string ([`9881350`](https://github.com/datajoint/element-array-ephys/commit/98813508f9a77ee3110d8df055957308361273d5)) + +* Update module import ([`139e99b`](https://github.com/datajoint/element-array-ephys/commit/139e99b4d8dfec9c267dd8718b533cdb5a59bc00)) + +* Fix module import ([`44be355`](https://github.com/datajoint/element-array-ephys/commit/44be35568edfab666d48dcaa30a02e72ea65159f)) + +* Remove test print statement ([`cf533a2`](https://github.com/datajoint/element-array-ephys/commit/cf533a275ca1220136472bbdf30048dc9f8c92e9)) + +* [WIP] Add print statement ([`b98192b`](https://github.com/datajoint/element-array-ephys/commit/b98192b8ca2cde9e2babbc48b383673a5ae15a94)) + +* [WIP] Update directory path ([`49c554b`](https://github.com/datajoint/element-array-ephys/commit/49c554bea6a2431140b553098f889f717600da3a)) + +* Update comments ([`ab426c1`](https://github.com/datajoint/element-array-ephys/commit/ab426c1ed9ea14b960bcd3d3e1970c74ef020143)) + +* Fix OpenEphys session path ([`2233c5d`](https://github.com/datajoint/element-array-ephys/commit/2233c5ddff6351b541125b53cb6c49a424cacc72)) + +* [WIP] Print directory path 
([`68ef14b`](https://github.com/datajoint/element-array-ephys/commit/68ef14b180c7fd5d61bbac1a9d6ec9d4a7c0530e)) + +* Fix import ([`2be1f08`](https://github.com/datajoint/element-array-ephys/commit/2be1f08af1d428570f5155f7d11463646805886b)) + +* Update author ([`b6b39c0`](https://github.com/datajoint/element-array-ephys/commit/b6b39c093a7603eba1a40b9b3b82db1c6294aac9)) + +* Add element_data_loader for multiple root dirs ([`ffaf60b`](https://github.com/datajoint/element-array-ephys/commit/ffaf60b72b648229b47e76ff9bb75ddedd56ef13)) + +* Move functions to `element-data-loader` ([`4f4be8d`](https://github.com/datajoint/element-array-ephys/commit/4f4be8d264398c3251baae5edc9be37a97c3f753)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`a0f49d2`](https://github.com/datajoint/element-array-ephys/commit/a0f49d27cacaceaf54a688abb4e9de2a153574a8)) + +* Merge pull request #30 from ttngu207/chronic_and_acute + +chronic and acute probe insertions as different python modules ([`1fdbcf1`](https://github.com/datajoint/element-array-ephys/commit/1fdbcf12d1a518e686b6b79e9fbe77b736cb606a)) + +* rename to `ephys_chronic` ([`7474f8f`](https://github.com/datajoint/element-array-ephys/commit/7474f8f2358b784d133277ecd1da8e687ab5fa14)) + +* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into chronic_and_acute ([`f28f0c6`](https://github.com/datajoint/element-array-ephys/commit/f28f0c6f489566e3c0be9cb8235ba7ea80d716f2)) + +* Update Dockerfile ([`d126bc5`](https://github.com/datajoint/element-array-ephys/commit/d126bc53476c5687dfcaf63689d74c18b707cf3e)) + +* chronic and acute probe insertions as different python modules ([`c4a9ab8`](https://github.com/datajoint/element-array-ephys/commit/c4a9ab8214c23c3c61f5f41ffef1c529e2c82b59)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`6472c19`](https://github.com/datajoint/element-array-ephys/commit/6472c19b6b21ffe091c4098ec848802453a63c3d)) + 
+* Merge pull request #27 from ttngu207/main + +beta 0 release ([`7c67f65`](https://github.com/datajoint/element-array-ephys/commit/7c67f65bcc8844eed429ab5b5a10f457162f9f15)) + +* beta 0 release ([`69a5424`](https://github.com/datajoint/element-array-ephys/commit/69a5424f0404eae379978a7f990b677d62aa42b9)) + +* Merge pull request #26 from ttngu207/main + +bump alpha version for first release on PyPI ([`7cd92ee`](https://github.com/datajoint/element-array-ephys/commit/7cd92ee693779a03abe73f91fca267a82435e59a)) + +* bump alpha version for first release on PyPI ([`99ab2fa`](https://github.com/datajoint/element-array-ephys/commit/99ab2fa6033e14c2ea98a4fced629287042766a6)) + +* Merge pull request #25 from ttngu207/main + +bump alpha version for first release on pypi ([`159a2a5`](https://github.com/datajoint/element-array-ephys/commit/159a2a5d4befc1748ab11f5443239a09df759ea6)) + +* bump alpha version for first release on pypi ([`ab3cfc9`](https://github.com/datajoint/element-array-ephys/commit/ab3cfc922bb76e0d6e3a0930ba3f995a47891802)) + +* Merge pull request #24 from ttngu207/main + +update README, improve markdown formatting, specify `long_description_content_type` to markdown, add versioning and GH Action for PyPI release ([`07f858c`](https://github.com/datajoint/element-array-ephys/commit/07f858c36437a7a79d8a5bddb49d026c03f274ad)) + +* Apply suggestions from code review + +Co-authored-by: Dimitri Yatsenko <dimitri@vathes.com> ([`98753ed`](https://github.com/datajoint/element-array-ephys/commit/98753ed3c3a6cff88f445b633d47b8e27fb4f7df)) + +* minor code cleanup ([`d68d53e`](https://github.com/datajoint/element-array-ephys/commit/d68d53e28eebf51a6f44c53a591b01ee0a894e54)) + +* versioning and GH Action for PyPI release ([`0ffc885`](https://github.com/datajoint/element-array-ephys/commit/0ffc88595b1365992e94f6b53fe9bb7b0d4a75c4)) + +* update diagram ([`9ece8cd`](https://github.com/datajoint/element-array-ephys/commit/9ece8cdc32b6fbe164874bf846f8c0eb26c2d8b7)) + +* update 
README, improve markdown formatting, specify `long_description_content_type` to markdown ([`3c9662b`](https://github.com/datajoint/element-array-ephys/commit/3c9662bd166d052c43c6ed5fd080bfd1c4b764ec)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`1ce53f3`](https://github.com/datajoint/element-array-ephys/commit/1ce53f37081b14744e625b8896df963af24cea2a)) + +* Merge pull request #23 from ttngu207/main + +added comments to tables ([`f2ac602`](https://github.com/datajoint/element-array-ephys/commit/f2ac602f71d9e105c584b91aa3b04a9cda6f931e)) + +* added comments to tables ([`f05e1fe`](https://github.com/datajoint/element-array-ephys/commit/f05e1fe5b1b7c992f35df9070049626a48cbcddc)) + +* Merge pull request #22 from ttngu207/main + +bump version - 0.1.0a3 ([`6fcc31a`](https://github.com/datajoint/element-array-ephys/commit/6fcc31ac94afdf1602c9ce5190f682cded37a19b)) + +* Update CHANGELOG.md + +Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`8d8683a`](https://github.com/datajoint/element-array-ephys/commit/8d8683aa6c03b7c834e368820dc13222703d177f)) + +* bump version - 0.1.0a3 ([`f500492`](https://github.com/datajoint/element-array-ephys/commit/f50049292446b8479f4c0d3df83cee03892c85cb)) + +* Merge pull request #21 from ttngu207/main + +GitHub Action for release process ([`283fad0`](https://github.com/datajoint/element-array-ephys/commit/283fad06c73f7b98e4f7f0d469005ded1149ad99)) + +* minor cleanup ([`6120883`](https://github.com/datajoint/element-array-ephys/commit/6120883e50b73ccabd7589a42971c47050c1b002)) + +* Apply suggestions from code review + +Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`ef3578e`](https://github.com/datajoint/element-array-ephys/commit/ef3578e3a6c1eaf3e082c0558c368b49cceb6a24)) + +* re-work `pkg_name` and use README as `long_description` 
([`1cbc62a`](https://github.com/datajoint/element-array-ephys/commit/1cbc62aaf9ba42534666e4790debdd3eba5a88d4)) + +* add docker-compose to gitignore ([`fc8f72b`](https://github.com/datajoint/element-array-ephys/commit/fc8f72b3739adbe95a5dcd822bacaef1327aa95c)) + +* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into main ([`5e32e91`](https://github.com/datajoint/element-array-ephys/commit/5e32e91b5af2ff64cb119ed730508c0ac67a2f51)) + +* Apply suggestions from code review + +Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`7cf70d1`](https://github.com/datajoint/element-array-ephys/commit/7cf70d10e7b064cd537121e8c23480939cfeed95)) + +* for testing - update twine upload to testpypi ([`ecc0ab2`](https://github.com/datajoint/element-array-ephys/commit/ecc0ab2aefbea413361371059d9fd22d190b2306)) + +* address review comments, add test-changelog ([`ef7b6c9`](https://github.com/datajoint/element-array-ephys/commit/ef7b6c91c417c5b2cccde7bbc4a08b8f0c5ec02e)) + +* Apply suggestions from code review + +Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`17dc100`](https://github.com/datajoint/element-array-ephys/commit/17dc100159947670b024f83c5e28e35567d444b3)) + +* Update CHANGELOG.md ([`e04f739`](https://github.com/datajoint/element-array-ephys/commit/e04f739df575ddab534e9e3b8aa26c3b2ba41cc1)) + +* version 0.1.0a3 ([`f433189`](https://github.com/datajoint/element-array-ephys/commit/f4331894dc804a62f660303038a831fb273a86e7)) + +* update setup, point back to `datajoint` github ([`c7a1940`](https://github.com/datajoint/element-array-ephys/commit/c7a194023a77cb89686bd2e3685494180eca6099)) + +* GH Action bugfix - bump version ([`f2c9726`](https://github.com/datajoint/element-array-ephys/commit/f2c972601a8b826d505849d74d9f7b6b7d13dcc8)) + +* bugfix, add SDIST_PKG_NAME ([`e8632a3`](https://github.com/datajoint/element-array-ephys/commit/e8632a3b267f8e40717c0ee457e10e35403e5777)) + +* improve 
package_name parsing ([`be26e4b`](https://github.com/datajoint/element-array-ephys/commit/be26e4b24718830b66a3fc2774b22cf1e448f2b3)) + +* Update development.yaml ([`ff5f5f9`](https://github.com/datajoint/element-array-ephys/commit/ff5f5f900a6beddd835c7d6af366b563cd8f31f8)) + +* Update development.yaml ([`f847aeb`](https://github.com/datajoint/element-array-ephys/commit/f847aebcdb1b099dd987f292d34891c065d71ffb)) + +* add `build` to GH action ([`5052b8e`](https://github.com/datajoint/element-array-ephys/commit/5052b8e7e494f2397bbc0a108ef5e9825c37206f)) + +* change package url - for testing GH release only ([`77f5240`](https://github.com/datajoint/element-array-ephys/commit/77f524093c69ce371627f12a386efa78332b779f)) + +* update changelog - bump version to 0.1.0a3 ([`bae08ad`](https://github.com/datajoint/element-array-ephys/commit/bae08ad5b7995b1246f0ead8f456ebe164f68053)) + +* Update development.yaml ([`d124407`](https://github.com/datajoint/element-array-ephys/commit/d1244077f55ac65ddddd5335a7684eb13280e37c)) + +* Update .gitignore ([`b8eb640`](https://github.com/datajoint/element-array-ephys/commit/b8eb64025e65bd49081d9d7c9e93a7ad3e8dc7fd)) + +* set up release processes for GH Action ([`a94e726`](https://github.com/datajoint/element-array-ephys/commit/a94e7268ca99d33dee0c15d1aac1f47747ca7bfd)) + +* Merge branch 'main' into GH-action-PyPI-release ([`b991fbb`](https://github.com/datajoint/element-array-ephys/commit/b991fbb776b71a5feff09b638f171488b78bd3ee)) + +* add `package_version` ([`02f5387`](https://github.com/datajoint/element-array-ephys/commit/02f5387a7bd4bce569193fecf09198456ea3b7b1)) + +* Create CHANGELOG.md ([`77a7a52`](https://github.com/datajoint/element-array-ephys/commit/77a7a5293f3d97a091a3ba159a87fe08967f9125)) + +* Merge pull request #20 from ttngu207/main + +table renames, code cleanup ([`4cced0e`](https://github.com/datajoint/element-array-ephys/commit/4cced0edd25ef2186dd9498c67632363cf37eab2)) + +* table renames, code cleanup 
([`236301a`](https://github.com/datajoint/element-array-ephys/commit/236301ab821107e37d26d59ee00e643c10e7f8d6)) + +* Merge pull request #19 from ttngu207/main + +Code cleanup/optimization, variables renaming for clarity ([`b0fa79f`](https://github.com/datajoint/element-array-ephys/commit/b0fa79ff1895e963f9f4ba6b11160ca4df2d087b)) + +* minor bugfix ([`9b18415`](https://github.com/datajoint/element-array-ephys/commit/9b184159115cd48a60c32f6d406675caeed8147a)) + +* split `find_valid_full_path` to `find_full_path` and `find_root_directory` ([`258839b`](https://github.com/datajoint/element-array-ephys/commit/258839b3a97c03cccbf36deeaa7637724af98bb5)) + +* support `.xlsx` cluster files ([`4e824cf`](https://github.com/datajoint/element-array-ephys/commit/4e824cfbef5cfee6555c77e0341ef069bd174703)) + +* minor wording fix ([`855f8eb`](https://github.com/datajoint/element-array-ephys/commit/855f8eb60c8f9ac3411badaff7fa20ea7d908caa)) + +* remove `get_clustering_root_data_dir()` from docstring ([`6f01562`](https://github.com/datajoint/element-array-ephys/commit/6f01562f8c2b60a497be89474958956e962171c8)) + +* allow root_dir to be a list of potential directories - util function `find_valid_full_path()` for root and path searching ([`6488fee`](https://github.com/datajoint/element-array-ephys/commit/6488fee6499a995756a303416740809b1b5886a7)) + +* code refactor - improve logic for `spikeglx_meta` file search ([`e51113b`](https://github.com/datajoint/element-array-ephys/commit/e51113b94bca3267f2870936c34d1140ceb037f9)) + +* Update version.py ([`91a3824`](https://github.com/datajoint/element-array-ephys/commit/91a382435fc5af8021718e54d57c908a1dc30418)) + +* bugfix ([`669c6e5`](https://github.com/datajoint/element-array-ephys/commit/669c6e53e0882b062cc23969a855205e906c2af0)) + +* improve variables naming in kilosort reader ([`e761501`](https://github.com/datajoint/element-array-ephys/commit/e7615017d168e1360fe0ab7e40c0958d9a9c97e4)) + +* improve variables naming 
([`c002646`](https://github.com/datajoint/element-array-ephys/commit/c0026467259cfff561b41fabf7ce5d08e4352911)) + +* improve naming, comments ([`cd28d9b`](https://github.com/datajoint/element-array-ephys/commit/cd28d9b43e319777a0e97f6e79d403623902cb06)) + +* code-cleanup - variables renaming - addressing PR review's comments ([`eb7eb2c`](https://github.com/datajoint/element-array-ephys/commit/eb7eb2c4336fa7e4ed8d109e24e2eba02341b8f0)) + +* Merge pull request #17 from ttngu207/main + +specify a separate `get_clustering_root_data_dir()` - handle cases where raw ephys and clustering results are stored at different root locations (e.g. different mount points) ([`74a7a56`](https://github.com/datajoint/element-array-ephys/commit/74a7a5669f0aad4be3b430f93dd3efaad24af920)) + +* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into main ([`99d761f`](https://github.com/datajoint/element-array-ephys/commit/99d761fd17b4fb410f5729a380424424f1fe5d43)) + +* Apply suggestions from code review - improve docstring/comments + +Co-authored-by: shenshan <shenshanpku@gmail.com> ([`6f8cd8b`](https://github.com/datajoint/element-array-ephys/commit/6f8cd8b832af8861ac29f47ffae13036e1a90b36)) + +* remove Quality Control - will add this as a separate element ([`33a421b`](https://github.com/datajoint/element-array-ephys/commit/33a421b5298c784111a5e62613a1e8a018c48c1c)) + +* comment fix ([`af54831`](https://github.com/datajoint/element-array-ephys/commit/af54831d29596758c871d81128bc3a501cb25a98)) + +* naming bugfix ([`75d31a5`](https://github.com/datajoint/element-array-ephys/commit/75d31a5c36ecb575171ee77c7aecb31348533c08)) + +* rename "OpenEphys" to "Open Ephys" ([`bc2f528`](https://github.com/datajoint/element-array-ephys/commit/bc2f528b0bc8a94f21eb21f2c65d6fa8c5b5a409)) + +* added `QualityControl` as a master-table and make `ClusterQualityMetrics` the part-table now - no need for modified `key_source` 
([`2c9a787`](https://github.com/datajoint/element-array-ephys/commit/2c9a787950c7ae97f87c4a1dc998565bdb0a65ee)) + +* Waveform table now a master table, with Waveform.Unit as part-table (no need for modified `key_source`) ([`31e2320`](https://github.com/datajoint/element-array-ephys/commit/31e2320955525b919246bd5aae6f85ef25ec30b7)) + +* openephys loader - code cleanup ([`033240f`](https://github.com/datajoint/element-array-ephys/commit/033240f97a049f8b6c23d17df4548252d89ae70d)) + +* creating Neuropixels probe (in ProbeType) as part of `probe` module activation ([`655115b`](https://github.com/datajoint/element-array-ephys/commit/655115bcc7a4530fea488d9737c12be42da046ee)) + +* tweaks to LFP and waveform ingestion - do in small batches to mitigate memory issue ([`dba0a48`](https://github.com/datajoint/element-array-ephys/commit/dba0a48726553913a77b6a65b9540713da505b73)) + +* minor updates to "jsiegle" PR - code, variable-naming cleanup + +Handle cases where the plugin is `Neuropix-PXI` but `NP_PROBE` is not present in `processor['EDITOR']` (only `PROBE`) ([`dcf8906`](https://github.com/datajoint/element-array-ephys/commit/dcf89069aa289d300779cb34bb4c3940be535bef)) + +* improve docstring/description ([`bebec1a`](https://github.com/datajoint/element-array-ephys/commit/bebec1ac563fc623fcc92125943ec410905230b2)) + +* 'enable_python_native_blobs' = True ([`fcb5983`](https://github.com/datajoint/element-array-ephys/commit/fcb5983a77981182670d3294cac60d82bc9bc501)) + +* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`2adf2e0`](https://github.com/datajoint/element-array-ephys/commit/2adf2e06af7a89120d8d07cfb33926f216420bf2)) + +* Merge pull request #16 from jsiegle/main + +Update Open Ephys data reader ([`cf39185`](https://github.com/datajoint/element-array-ephys/commit/cf391854d0510ec1d4a903c478f880250523780a)) + +* Update Open Ephys data reader 
([`a85e835`](https://github.com/datajoint/element-array-ephys/commit/a85e83532f017da405ac67fd7e5d135a52d07a9f)) + +* specify a separate `get_clustering_root_data_dir()` - handle cases where raw ephys and clustering results are stored a different root locations (e.g. different mount points) ([`ce90dc6`](https://github.com/datajoint/element-array-ephys/commit/ce90dc6b212d64ea158c6392390f813cfa7a4df7)) + +* add `version.py` ([`4185ba3`](https://github.com/datajoint/element-array-ephys/commit/4185ba3adb0ccfeac6e87b1220045ca5d8753fd7)) + +* Update .gitignore ([`f69e491`](https://github.com/datajoint/element-array-ephys/commit/f69e491c56e4e87b9a35c922a55a19658bd82030)) + +* Merge pull request #13 from ttngu207/main + +Mostly code cleanup - formatting ([`f07d131`](https://github.com/datajoint/element-array-ephys/commit/f07d13106edafdad87ae28f229907ff6847982e3)) + +* more code-cleanup ([`ea1547f`](https://github.com/datajoint/element-array-ephys/commit/ea1547fb4b31f748b9f5c2f92d622fcb752b1d73)) + +* Update setup.py ([`15ca803`](https://github.com/datajoint/element-array-ephys/commit/15ca803536aba0bb292d5ed3942ed85e1d4793e9)) + +* Update Background.md ([`0833d10`](https://github.com/datajoint/element-array-ephys/commit/0833d106cf4a6de9b0eb8acc9a5bf372fde1b979)) + +* cleanup ([`6c490f8`](https://github.com/datajoint/element-array-ephys/commit/6c490f8a96a09d0e681c63b36951d4def1fcbd7b)) + +* Update README.md ([`40ce9e6`](https://github.com/datajoint/element-array-ephys/commit/40ce9e68c5b88d842135390fa8378bb42d2a9947)) + +* rename `elements-ephys` -> `element-array-ephys` ([`fa369f0`](https://github.com/datajoint/element-array-ephys/commit/fa369f04c43e5f6e7cb68870bf58a1d8910888e0)) + +* Update README.md ([`a573e5c`](https://github.com/datajoint/element-array-ephys/commit/a573e5c257623bbdf93f29ec4d9a2184feab3162)) + +* Update Background.md ([`cf2f172`](https://github.com/datajoint/element-array-ephys/commit/cf2f172c2a6ee5729d913e4f882c7a7d3b30168d)) + +* Update Background.md 
([`dfff966`](https://github.com/datajoint/element-array-ephys/commit/dfff966bf190d4d9d41bd6150346b52d44edf30b)) + +* added Background section ([`653b84f`](https://github.com/datajoint/element-array-ephys/commit/653b84f73b8131733cb33546fd3234e85078b800)) + +* code cleanup - formatting ([`7ab0c2a`](https://github.com/datajoint/element-array-ephys/commit/7ab0c2a4c1cca04be1271b593d1e944a565a64b3)) + +* Create CONTRIBUTING.md ([`1ee37ab`](https://github.com/datajoint/element-array-ephys/commit/1ee37ab341bd0959aea572321168646e9cc97dbf)) + +* Merge pull request #10 from ttngu207/main + +Ephys pipeline with support for multiple curations ([`983d61a`](https://github.com/datajoint/element-array-ephys/commit/983d61a89ccc42f114a51915261b443e2c2b153e)) + +* update diagrams ([`e98b34f`](https://github.com/datajoint/element-array-ephys/commit/e98b34f52aaa0a042810685f8b896c2288774131)) + +* Update requirements.txt ([`bab8e1d`](https://github.com/datajoint/element-array-ephys/commit/bab8e1d5cfbd0930323b8716d4eb80550a106bda)) + +* bugfix in spikeglx get original channels ([`f8244c8`](https://github.com/datajoint/element-array-ephys/commit/f8244c89ab86d83abad5ef870639180d6a751c4d)) + +* Merge branch 'multiple-curations' into main ([`bfab1dd`](https://github.com/datajoint/element-array-ephys/commit/bfab1dde4dc7b3620bd4cd0950460da71ac18a2e)) + +* bugfix in Unit ingestion ([`adfd5af`](https://github.com/datajoint/element-array-ephys/commit/adfd5af9632f7987a427d7ff07d926e85f90bff3)) + +* added a `CuratedClustering` as master table for `Unit` ([`7bd751a`](https://github.com/datajoint/element-array-ephys/commit/7bd751a8bc2574b14180eb39016cdb620358c4a5)) + +* Update openephys.py ([`a889407`](https://github.com/datajoint/element-array-ephys/commit/a8894072c7d84e375ef9ca458d7556703916bfaf)) + +* minor code cleanup ([`b0011a1`](https://github.com/datajoint/element-array-ephys/commit/b0011a18ee199afb878bbf8c152d526331a2a820)) + +* `Curation` downstream from `Clustering` - move `Curation` 
insertion in `Clustering.make()` to a separate utility function ([`6859e52`](https://github.com/datajoint/element-array-ephys/commit/6859e52ba4832f7dd714c3890552b243ecffd6c7)) + +* Merge branch 'main' into multiple-curations ([`64bd47d`](https://github.com/datajoint/element-array-ephys/commit/64bd47d72aa3bcd0de31d22a46db5be821ce88f1)) + +* prototype design for multiple curations ([`94686f5`](https://github.com/datajoint/element-array-ephys/commit/94686f5d2237f16a7cb9885f0ffb6fc11db49785)) + +* Merge pull request #9 from ttngu207/main + +keep `_timeseries` data as memmap int16 type, apply bitvolt conversion at LFP/Waveform extraction step & Bugfix in channel matching for SpikeGLX ([`70a813b`](https://github.com/datajoint/element-array-ephys/commit/70a813b207bba72bb3a268a797ef156a53c15c7a)) + +* Update elements_ephys/readers/spikeglx.py + +Co-authored-by: Dimitri Yatsenko <dimitri@vathes.com> ([`93ea01a`](https://github.com/datajoint/element-array-ephys/commit/93ea01a9bad217fad18a77a99b2df46b0986828c)) + +* minor formatting, PEP8 ([`d656108`](https://github.com/datajoint/element-array-ephys/commit/d65610889bba20fe468c5a97663769c3a97cf418)) + +* datajoint version 0.13+ required ([`39580e1`](https://github.com/datajoint/element-array-ephys/commit/39580e14f2ffc0d3772c3267e5525e8f9216a5b4)) + +* bugfix - openephys waveform extraction ([`825407c`](https://github.com/datajoint/element-array-ephys/commit/825407c5f3fae0def1291dfaf6b87bdaf14ea5f4)) + +* bugfix ([`4afc0f1`](https://github.com/datajoint/element-array-ephys/commit/4afc0f11e164281b91357f3ac07b8fb3d17cbce8)) + +* try-catch for searching/loading spikeglx files ([`f3d98b3`](https://github.com/datajoint/element-array-ephys/commit/f3d98b3b14a903c962037ca5406c9a3302475de3)) + +* keep `_timeseries` data as memmap int16 type, apply bitvolt conversion only when needed (at LFP or waveform extraction) ([`f9e5fc2`](https://github.com/datajoint/element-array-ephys/commit/f9e5fc291c170fcae905c9432d5f50f439a5e891)) + +* Update 
requirements.txt ([`625c630`](https://github.com/datajoint/element-array-ephys/commit/625c6307d9f9dbb97b953131a166782b230b0f4c)) + +* Update attached_ephys_element.svg ([`1411687`](https://github.com/datajoint/element-array-ephys/commit/1411687ad687fb75e3cb72831bbc580696d9a5ae)) + +* added svg diagram ([`7a0762c`](https://github.com/datajoint/element-array-ephys/commit/7a0762c18c28acdd4009c012eabd8d102b816f76)) + +* Merge pull request #8 from ttngu207/main + +ClusteringTask as manual table - Ingestion support for OpenEphys ([`f76086c`](https://github.com/datajoint/element-array-ephys/commit/f76086c611428ed4d8cc52edee6b240fb805779a)) + +* bugfix: Imax per probe type ([`56f8fdc`](https://github.com/datajoint/element-array-ephys/commit/56f8fdc43db8b1975a4cca46c1702a8670a190c2)) + +* code cleanup - renamed `data` -> `timeseries` ([`6d5ee8b`](https://github.com/datajoint/element-array-ephys/commit/6d5ee8bf68bfc45f9a760515fb399945c85fb6be)) + +* code cleanup, added docstring & comments to code blocks ([`e64dafe`](https://github.com/datajoint/element-array-ephys/commit/e64dafedd53de6ebf2243d6049982738f0e8d56b)) + +* Update spikeglx.py ([`238a511`](https://github.com/datajoint/element-array-ephys/commit/238a511d0030299b650868c78de05e428739a3e0)) + +* bugfix in waveform extraction ([`60e320d`](https://github.com/datajoint/element-array-ephys/commit/60e320d7973490bf3ae77ec0b6c9b86addbab921)) + +* added comment ([`be82f4e`](https://github.com/datajoint/element-array-ephys/commit/be82f4e9a5a262f73c427b5996bf7b3778e105ba)) + +* minor code cleanup ([`8aa11e2`](https://github.com/datajoint/element-array-ephys/commit/8aa11e231140f81fc10347f08a9f46e8c1e345b3)) + +* extract and apply bit-volts conversion for spikeglx loader ([`b5c11f0`](https://github.com/datajoint/element-array-ephys/commit/b5c11f04ae9b7b33fe93efd24fb292090e683d89)) + +* apply channels' gain for the data ([`8ceeb0b`](https://github.com/datajoint/element-array-ephys/commit/8ceeb0b8daea0f4d6d3c1aadf28930b50ae9fec9)) 
+ +* remove `used_in_reference` in ElectrodeConfig + +this is misleading as it's only relevant for SpikeGLX acquisition for denoting channel visualization ([`847eeba`](https://github.com/datajoint/element-array-ephys/commit/847eeba4263c5a050ca7ffafa0cd4e891e099b21)) + +* bugfix in waveform extraction for OpenEphys ([`281e37b`](https://github.com/datajoint/element-array-ephys/commit/281e37b8c4c2da28fb7c94525be0db1b8eb495d4)) + +* bugfix in waveform ingestion ([`3452ab7`](https://github.com/datajoint/element-array-ephys/commit/3452ab721f0dc2022d1aaae0cb919e97cc25a8f8)) + +* code cleanup ([`3784238`](https://github.com/datajoint/element-array-ephys/commit/3784238c6ae8ed3b2c556544c054c3ca15e59e86)) + +* waveform ingestion for OpenEphys ([`1d02cf5`](https://github.com/datajoint/element-array-ephys/commit/1d02cf57ea04ced7a5b8062873069d5b4c473c72)) + +* extract_spike_waveforms() for OpenEphys ([`2d6f22c`](https://github.com/datajoint/element-array-ephys/commit/2d6f22c0a78a0ef7c617322fcc4658c045341ee1)) + +* implement "probe" in OpenEphys as a standalone class ([`045344d`](https://github.com/datajoint/element-array-ephys/commit/045344dc6ac38bf6482208065f95ff0b28aeedb9)) + +* minor bugfix in channel mapping/fetching ([`631837d`](https://github.com/datajoint/element-array-ephys/commit/631837d4e4f1c52a45105ee1817f397221a304cd)) + +* Update spikeglx.py ([`af2831c`](https://github.com/datajoint/element-array-ephys/commit/af2831ce1e4e278315644a2a7e5aab29fa495131)) + +* minor naming bugfix ([`e9d60d7`](https://github.com/datajoint/element-array-ephys/commit/e9d60d7088cc54b29c2b13ec5c5886fd77e5004a)) + +* rename `neuropixels` -> `spikeglx` ([`07982dc`](https://github.com/datajoint/element-array-ephys/commit/07982dc934a1103bdca1369da621cec393b26eea)) + +* LFP ingestion for OpenEphys ([`75149b3`](https://github.com/datajoint/element-array-ephys/commit/75149b3a9a6f3eed51977398ef037fbfe5de27ca)) + +* EphysRecording's `make()` handles OpenEphys 
([`f784f12`](https://github.com/datajoint/element-array-ephys/commit/f784f12373eed355f5c45d16796ba9363abc75be)) + +* Update probe.py ([`628c7f0`](https://github.com/datajoint/element-array-ephys/commit/628c7f06bf4764f25c0f9113474e4cb1739e3f01)) + +* update ephys ingestion routine, refactor electrode config generation ([`2750aa9`](https://github.com/datajoint/element-array-ephys/commit/2750aa98b861d6426d7ee9335db7c81412f4ace0)) + +* openephys loader, using pyopenephys pkg ([`5540bbe`](https://github.com/datajoint/element-array-ephys/commit/5540bbe9a09fc5bc287f5973eff00f3766b9e8c3)) + +* Update neuropixels.py ([`eba6b8c`](https://github.com/datajoint/element-array-ephys/commit/eba6b8c1303fee1541b19f3ad72a4a88e54a18b3)) + +* openephys loader, using `open_ephys` pkg ([`a2ba6d6`](https://github.com/datajoint/element-array-ephys/commit/a2ba6d63753e9e302427728df6c23d74a45370a6)) + +* Update LICENSE ([`e29180f`](https://github.com/datajoint/element-array-ephys/commit/e29180fac4da203b47540c8f358bc489ba341993)) + +* Update openephys.py ([`2545772`](https://github.com/datajoint/element-array-ephys/commit/25457726e2faa3a8748ec7410e0e7a6b708b8cbc)) + +* `ClusteringTask` as manual table with user specified paramset_idx and clustering_output_dir ([`6850702`](https://github.com/datajoint/element-array-ephys/commit/6850702d2be133942391597c77805555fcca4216)) + +* Merge branch 'main' into OpenEphys-support ([`7d827a1`](https://github.com/datajoint/element-array-ephys/commit/7d827a11b8cea2952bb0a4d44b6285f1bd052ad9)) + +* infer/store ephys-recording directory, based on `session_dir` ([`38927c2`](https://github.com/datajoint/element-array-ephys/commit/38927c242c0a92323fb4080dac463b7e3ab3c693)) + +* Merge branch 'main' of https://github.com/datajoint/elements-ephys into main ([`8a16bf2`](https://github.com/datajoint/element-array-ephys/commit/8a16bf21ea6b6213b38fff0af7328a44195f2040)) + +* added AcquisitionSoftware 
([`7de2127`](https://github.com/datajoint/element-array-ephys/commit/7de2127e0601c7817cf77fd993f6402729840ca5)) + +* minor bugfix: `probe.schema.activate` -> `probe.activate` ([`e278573`](https://github.com/datajoint/element-array-ephys/commit/e278573cd5c250ba9801ec2432f09e647ecb2428)) + +* Create open_ephys.py ([`a28c2da`](https://github.com/datajoint/element-array-ephys/commit/a28c2dac483c4f3366b185dfbd47b4c28c1f4e04)) + +* Merge pull request #7 from ttngu207/main + +update docstring for function `activate` ([`8893fc8`](https://github.com/datajoint/element-array-ephys/commit/8893fc800bfb224e28013d1475c19f59c669ea8d)) + +* update wording, `required_module` -> `linking_module` ([`071bf35`](https://github.com/datajoint/element-array-ephys/commit/071bf353e4376623298826af3187e0fc6c3837fa)) + +* update docstring for function `activate` ([`f11900f`](https://github.com/datajoint/element-array-ephys/commit/f11900f19d6735c0fd4bb4420a05d03670fd6b4e)) + +* Merge pull request #6 from ttngu207/main + +implement new "activation" mechanism -> using dict, module name or module for `requirement` ([`ec58e20`](https://github.com/datajoint/element-array-ephys/commit/ec58e20962689f2d87373209acd4bf07178bfeec)) + +* simplify "activate" no explicit requirements check ([`aa4064c`](https://github.com/datajoint/element-array-ephys/commit/aa4064cd22704fdb69b83ce6d793c1ba307b1a3a)) + +* minor format cleanup ([`822c5b7`](https://github.com/datajoint/element-array-ephys/commit/822c5b742e74ff36861ddd8a652b9bdd48bd03d8)) + +* implement new "activation" mechanism -> using dict, module name or module as 'requirement' ([`c9e7f1e`](https://github.com/datajoint/element-array-ephys/commit/c9e7f1e0b1cfbc77c6f7cffbb93b1bafbeeed731)) + +* bugfix in `paramset_name` -> `paramset_idx` ([`852f5a4`](https://github.com/datajoint/element-array-ephys/commit/852f5a471f0ed0703c774716c46f5484854e9e57)) + +* Merge pull request #5 from ttngu207/main + +minor tweak using `schema.database`, awaiting 
`schema.is_activated` ([`e9191dd`](https://github.com/datajoint/element-array-ephys/commit/e9191dd6c9c225874aa046ea75c0ea0acc581c17)) + +* minor tweak using `schema.database`, awaiting `schema.is_activated` ([`d606233`](https://github.com/datajoint/element-array-ephys/commit/d606233e80ab3c289c23d852977147823f8e09dc)) + +* Merge pull request #4 from dimitri-yatsenko/main + +ephys.activate inserts required functions into the module namespace ([`dadda0d`](https://github.com/datajoint/element-array-ephys/commit/dadda0d19ecc0afb7043e8ad888d918dacca0378)) + +* ephys.activate inserts required functions into the module namespace ([`1f732d3`](https://github.com/datajoint/element-array-ephys/commit/1f732d39ea11d03f88147fef645f29abc59eede5)) + +* Merge branch 'main' of https://github.com/datajoint/elements-ephys into main ([`3db8461`](https://github.com/datajoint/element-array-ephys/commit/3db84614d914863c9ec76752009bf8450f9439e5)) + +* Merge pull request #3 from ttngu207/main + +code cleanup, bug fix, tested ([`1cc5119`](https://github.com/datajoint/element-array-ephys/commit/1cc51196433e8ff6046f54cbd99162d0a7ed857b)) + +* code cleanup, bug fix, tested ([`11d2aec`](https://github.com/datajoint/element-array-ephys/commit/11d2aec8bbaf9bf2c396d86a7f6e513607476dbf)) + +* Merge pull request #2 from dimitri-yatsenko/main + +Refactor to use schema.activate from DataJoint 0.13 ([`5834b4a`](https://github.com/datajoint/element-array-ephys/commit/5834b4a75e38ffcd6dd1b8333f88300e0f2124cc)) + +* fix imported class names in the ephys module ([`b822e63`](https://github.com/datajoint/element-array-ephys/commit/b822e6313d0d73a2a4d7d26706d1ae712ad806ae)) + +* minor cleanup ([`5edc3ce`](https://github.com/datajoint/element-array-ephys/commit/5edc3ced1129c8807a422876b3da261c2f1d6c11)) + +* update to comply with datajoint 0.13 deferred schema use ([`a925450`](https://github.com/datajoint/element-array-ephys/commit/a925450db74e102dfd6a66dc55e53629a0d41765)) + +* Merge pull request #1 from 
ttngu207/main + +moved from `canonical-ephys` ([`d1decf2`](https://github.com/datajoint/element-array-ephys/commit/d1decf2ac4c5e021c6c63a554052ed72ee9a1379)) + +* moved from `canonical-ephys` ([`55f7717`](https://github.com/datajoint/element-array-ephys/commit/55f771729d06cd9a8346d4ed0882bd51ae603489)) + +* Create README.md ([`0896c85`](https://github.com/datajoint/element-array-ephys/commit/0896c85193a93550e19775c7c4b02b1fa5f7742f)) diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 6b5406e8..008790cf 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.3.5" +__version__ = "0.3.6" From 463ec527ff216a1078ea97665d365d08d140c621 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 8 Oct 2024 11:56:56 -0500 Subject: [PATCH 152/204] feat(spike_sorting): handle cases when no units/spikes are found --- element_array_ephys/spike_sorting/si_spike_sorting.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 550ae4a1..7a652076 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -275,11 +275,17 @@ def make(self, key): analyzer_output_dir = output_dir / sorter_name / "sorting_analyzer" + has_units = si_sorting.unit_ids.size > 0 + @memoized_result( uniqueness_dict=postprocessing_params, output_directory=analyzer_output_dir, ) def _sorting_analyzer_compute(): + if not has_units: + log.info("No units found in sorting object. 
Skipping sorting analyzer.") + return + # Sorting Analyzer sorting_analyzer = si.create_sorting_analyzer( sorting=si_sorting, @@ -303,6 +309,8 @@ def _sorting_analyzer_compute(): _sorting_analyzer_compute() + do_si_export = postprocessing_params.get("export_to_phy", False) or postprocessing_params.get("export_report", False) + self.insert1( { **key, @@ -311,8 +319,7 @@ def _sorting_analyzer_compute(): datetime.utcnow() - execution_time ).total_seconds() / 3600, - "do_si_export": postprocessing_params.get("export_to_phy", False) - or postprocessing_params.get("export_report", False), + "do_si_export": do_si_export and has_units, } ) From 451571de595114e59f732c2e1e66298a26d6eeeb Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 8 Oct 2024 15:06:24 -0500 Subject: [PATCH 153/204] feat(spike_sorting): update downstream ephys tables ingestion when NO UNITs found --- element_array_ephys/ephys_no_curation.py | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/element_array_ephys/ephys_no_curation.py b/element_array_ephys/ephys_no_curation.py index dae9e4a9..5a30d81d 100644 --- a/element_array_ephys/ephys_no_curation.py +++ b/element_array_ephys/ephys_no_curation.py @@ -1038,6 +1038,16 @@ def make(self, key): if si_sorting_analyzer_dir.exists(): # Read from spikeinterface outputs import spikeinterface as si + from spikeinterface import sorters + + sorting_file = output_dir / sorter_name / "spike_sorting" / "si_sorting.pkl" + si_sorting_: si.sorters.BaseSorter = si.load_extractor( + sorting_file, base_folder=output_dir + ) + if si_sorting_.unit_ids.size == 0: + logger.info(f"No units found in {sorting_file}. 
Skipping Unit ingestion...") + self.insert1(key) + return sorting_analyzer = si.load_sorting_analyzer(folder=si_sorting_analyzer_dir) si_sorting = sorting_analyzer.sorting @@ -1241,6 +1251,11 @@ def make(self, key): output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) sorter_name = clustering_method.replace(".", "_") + self.insert1(key) + if not len(CuratedClustering.Unit & key): + logger.info(f"No CuratedClustering.Unit found for {key}, skipping Waveform ingestion.") + return + # Get channel and electrode-site mapping electrode_query = (EphysRecording.Channel & key).proj(..., "-channel_name") channel2electrode_map: dict[int, dict] = { @@ -1294,7 +1309,6 @@ def yield_unit_waveforms(): ] yield unit_peak_waveform, unit_electrode_waveforms - else: # read from kilosort outputs (ecephys pipeline) kilosort_dataset = kilosort.Kilosort(output_dir) @@ -1394,7 +1408,6 @@ def yield_unit_waveforms(): yield unit_peak_waveform, unit_electrode_waveforms # insert waveform on a per-unit basis to mitigate potential memory issue - self.insert1(key) for unit_peak_waveform, unit_electrode_waveforms in yield_unit_waveforms(): if unit_peak_waveform: self.PeakWaveform.insert1(unit_peak_waveform, ignore_extra_fields=True) @@ -1501,6 +1514,11 @@ def make(self, key): output_dir = find_full_path(get_ephys_root_data_dir(), output_dir) sorter_name = clustering_method.replace(".", "_") + self.insert1(key) + if not len(CuratedClustering.Unit & key): + logger.info(f"No CuratedClustering.Unit found for {key}, skipping QualityMetrics ingestion.") + return + si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" if si_sorting_analyzer_dir.exists(): # read from spikeinterface outputs import spikeinterface as si @@ -1556,7 +1574,6 @@ def make(self, key): for unit_key in (CuratedClustering.Unit & key).fetch("KEY") ] - self.insert1(key) self.Cluster.insert(metrics_list, ignore_extra_fields=True) self.Waveform.insert(metrics_list, ignore_extra_fields=True) From 
b1104ce09158c09175af94682e6e7a0281a7cda2 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Wed, 9 Oct 2024 09:37:00 -0500 Subject: [PATCH 154/204] fix(spike_sorting): create empty `sorting_analyzer` folder when no units found --- element_array_ephys/spike_sorting/si_spike_sorting.py | 1 + 1 file changed, 1 insertion(+) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index 7a652076..a47f1d89 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -284,6 +284,7 @@ def make(self, key): def _sorting_analyzer_compute(): if not has_units: log.info("No units found in sorting object. Skipping sorting analyzer.") + analyzer_output_dir.mkdir(parents=True, exist_ok=True) # create empty directory anyway, for consistency return # Sorting Analyzer From a3f353a7f85412b00a2a9bd4f8419fb4a307af8c Mon Sep 17 00:00:00 2001 From: kushalbakshi Date: Fri, 1 Nov 2024 14:58:41 -0400 Subject: [PATCH 155/204] Fix(spikeglx): robust IMAX value detection from IMEC file --- CHANGELOG.md | 6 ++++++ element_array_ephys/readers/spikeglx.py | 4 ++-- element_array_ephys/version.py | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 54926bd3..7cba7c56 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # CHANGELOG +## v0.3.7 (2024-11-01) + +### Fix + +* Fi(spikeglx): robust IMAX value detection from IMEC file + ## v0.3.6 (2024-10-01) ### Chore diff --git a/element_array_ephys/readers/spikeglx.py b/element_array_ephys/readers/spikeglx.py index ca60648d..214b70b9 100644 --- a/element_array_ephys/readers/spikeglx.py +++ b/element_array_ephys/readers/spikeglx.py @@ -96,15 +96,15 @@ def get_channel_bit_volts(self, band="ap"): dataVolts = dataInt * Vmax / Imax / gain """ vmax = float(self.apmeta.meta["imAiRangeMax"]) + imax = self.apmeta.meta.get("imMaxInt") + imax = float(imax) if imax else 
IMAX[self.apmeta.probe_model] if band == "ap": - imax = IMAX[self.apmeta.probe_model] imroTbl_data = self.apmeta.imroTbl["data"] imroTbl_idx = 3 chn_ind = self.apmeta.get_recording_channels_indices(exclude_sync=True) elif band == "lf": - imax = IMAX[self.lfmeta.probe_model] imroTbl_data = self.lfmeta.imroTbl["data"] imroTbl_idx = 4 chn_ind = self.lfmeta.get_recording_channels_indices(exclude_sync=True) diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 008790cf..6b3c3b3f 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.3.6" +__version__ = "0.3.7" From 3e401f1758a1ce9523e6298ebbdadef2c59990d4 Mon Sep 17 00:00:00 2001 From: kushalbakshi Date: Fri, 1 Nov 2024 15:00:35 -0400 Subject: [PATCH 156/204] Remove merge conflict-resolution change --- element_array_ephys/ephys_acute.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/element_array_ephys/ephys_acute.py b/element_array_ephys/ephys_acute.py index 54a322b5..f93a66a4 100644 --- a/element_array_ephys/ephys_acute.py +++ b/element_array_ephys/ephys_acute.py @@ -180,10 +180,7 @@ def auto_generate_entries(cls, session_key): "probe_type": spikeglx_meta.probe_model, "probe": spikeglx_meta.probe_SN, } - if ( - probe_key["probe"] not in [p["probe"] for p in probe_list] - and probe_key not in probe.Probe() - ): + if probe_key["probe"] not in [p["probe"] for p in probe_list]: probe_list.append(probe_key) probe_dir = meta_filepath.parent @@ -207,10 +204,7 @@ def auto_generate_entries(cls, session_key): "probe_type": oe_probe.probe_model, "probe": oe_probe.probe_SN, } - if ( - probe_key["probe"] not in [p["probe"] for p in probe_list] - and probe_key not in probe.Probe() - ): + if probe_key["probe"] not in [p["probe"] for p in probe_list]: probe_list.append(probe_key) probe_insertion_list.append( { From 4f704690c547a794e8ad94144e0bc62cb2446e0e Mon Sep 17 00:00:00 2001 From: Kushal 
Bakshi <52367253+kushalbakshi@users.noreply.github.com> Date: Fri, 1 Nov 2024 15:18:50 -0400 Subject: [PATCH 157/204] Update CHANGELOG.md Co-authored-by: Thinh Nguyen --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7cba7c56..ce98a10a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,7 @@ ### Fix -* Fi(spikeglx): robust IMAX value detection from IMEC file +* Fix(spikeglx): robust IMAX value detection from IMEC file (metadata 3.0) ## v0.3.6 (2024-10-01) From 0508c94df3881e8d7fb28fc7174e958336834962 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Mon, 13 Jan 2025 16:30:55 -0600 Subject: [PATCH 158/204] update: fix docs, new version is `0.4.0` --- docs/src/concepts.md | 2 +- docs/src/index.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/concepts.md b/docs/src/concepts.md index cb57a802..b5da5081 100644 --- a/docs/src/concepts.md +++ b/docs/src/concepts.md @@ -68,7 +68,7 @@ existing spike sorting methods, named by Alessio Buccino, et al. SpikeInterface provides a convenient Python-based wrapper to invoke, extract, compare spike sorting results from different sorting algorithms. SpikeInterface is the primary tool supported by Element Array Electrophysiology for -spike sorting as of version `1.0.0`. +spike sorting as of version `0.4.0`. ## Key Partnerships diff --git a/docs/src/index.md b/docs/src/index.md index 0c828c00..5d9b7f19 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -13,7 +13,7 @@ The Element is comprised of `probe` and `ephys` schemas. Visit the `ephys` schemas and an explanation of the tables. To get started with building your own data pipeline, visit the [Tutorials page](./tutorials/index.md). -Prior to version `1.0.0` , several `ephys` schemas were +Prior to version `0.4.0` , several `ephys` schemas were developed and supported to handle various use cases of this pipeline and workflow. 
These are now deprecated but still available on their own branch within the repository: From 284106a7e3820971ade67153b7b231435fd3c1eb Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 14 Jan 2025 08:50:20 -0600 Subject: [PATCH 159/204] update: make this the `1.0.0` release --- docs/src/concepts.md | 2 +- docs/src/index.md | 2 +- element_array_ephys/version.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/concepts.md b/docs/src/concepts.md index b5da5081..cb57a802 100644 --- a/docs/src/concepts.md +++ b/docs/src/concepts.md @@ -68,7 +68,7 @@ existing spike sorting methods, named by Alessio Buccino, et al. SpikeInterface provides a convenient Python-based wrapper to invoke, extract, compare spike sorting results from different sorting algorithms. SpikeInterface is the primary tool supported by Element Array Electrophysiology for -spike sorting as of version `0.4.0`. +spike sorting as of version `1.0.0`. ## Key Partnerships diff --git a/docs/src/index.md b/docs/src/index.md index 5d9b7f19..0c828c00 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -13,7 +13,7 @@ The Element is comprised of `probe` and `ephys` schemas. Visit the `ephys` schemas and an explanation of the tables. To get started with building your own data pipeline, visit the [Tutorials page](./tutorials/index.md). -Prior to version `0.4.0` , several `ephys` schemas were +Prior to version `1.0.0` , several `ephys` schemas were developed and supported to handle various use cases of this pipeline and workflow. 
These are now deprecated but still available on their own branch within the repository: diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 7a2d5521..19ba4c76 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "4.0.0" +__version__ = "1.0.0" From 7c3f0efcc24dbdca4eb67d24c1e810e9769ec579 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Thu, 16 Jan 2025 16:01:50 +0000 Subject: [PATCH 160/204] chore: revert semantic release and changelog --- .github/workflows/semantic-release-caller.yml | 10 - CHANGELOG.md | 2666 +---------------- element_array_ephys/version.py | 2 +- 3 files changed, 137 insertions(+), 2541 deletions(-) delete mode 100644 .github/workflows/semantic-release-caller.yml diff --git a/.github/workflows/semantic-release-caller.yml b/.github/workflows/semantic-release-caller.yml deleted file mode 100644 index bfd7643d..00000000 --- a/.github/workflows/semantic-release-caller.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: semantic-release -on: - workflow_dispatch: - -jobs: - call_semantic_release: - uses: datajoint/.github/.github/workflows/semantic-release.yaml@main - secrets: - APP_ID: ${{ secrets.ELEMENT_APP_ID }} - GET_TOKEN_KEY: ${{ secrets.ELEMENT_GET_TOKEN_KEY }} diff --git a/CHANGELOG.md b/CHANGELOG.md index ce98a10a..1199cfc2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,2590 +1,196 @@ -# CHANGELOG +# Release notes -## v0.3.7 (2024-11-01) +Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and + [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
-### Fix +## [v0.3.8] - 2025-01-16 -* Fix(spikeglx): robust IMAX value detection from IMEC file (metadata 3.0) +* Change - Revert GHA Semantic Release caller and update changelog -## v0.3.6 (2024-10-01) +## [v0.3.7] - 2024-11-01 -### Chore +* Fix - robust IMAX value detection from IMEC file (metadata 3.0) -* chore(.github): add new GitHub Action callers for devcontainer, mkdocs, and semantic release ([`fc8ac1d`](https://github.com/datajoint/element-array-ephys/commit/fc8ac1d8159e07714f1041bca67fa3546451f1a0)) +## [v0.3.6] - 2024-10-01 -### Fix +* Fix - Minor bugfix in reading npx probe model of older versions and in reaeding probe model +* Add - New GitHub Action callers for devcontainer, mkdocs, and semantic release -* fix(spikeglx): minor bugfix in reading probe model ([`2d57102`](https://github.com/datajoint/element-array-ephys/commit/2d57102880d872cf1a4ec037eee5892a87536ff2)) +## [0.3.5] - 2024-08-16 -### Unknown +* Fix - Improve `spikeglx` loader in extracting neuropixels probe type from the meta file +* Update - Explicit call to `probe.create_neuropixels_probe_types()` to create entries in `ProbeType` -* Merge pull request #205 from MilagrosMarin/chore/update-gha +## [0.3.4] - 2024-03-22 -chore(.github): add new GitHub Action callers ([`d091ffc`](https://github.com/datajoint/element-array-ephys/commit/d091ffc86b6818fdcf16bfdafaa4f8829d771e7b)) +* Add - pytest +* Update - Ephys schema changed from `ephys_acute` to `ephys_no_curation` in `tutorial.ipynb` -* Merge pull request #202 from ttngu207/main +## [0.3.3] - 2024-01-24 -fix(spikeglx): minor bugfix in reading npx probe model of older versions ([`780352b`](https://github.com/datajoint/element-array-ephys/commit/780352b51c3d002d6748bd54f61bd79169e07d95)) +* Update - remove PyPi release from `release.yml` since it will fail after the new `setup.py` -## v0.3.5 (2024-08-19) +## [0.3.2] - 2024-01-12 -### Fix +* Fix - `probe_geometry` bugfix for incorrect handling of probes with staggered electrode positions -* 
fix(spikeglx): minor bugfix ([`6764f8c`](https://github.com/datajoint/element-array-ephys/commit/6764f8c1adb9a80569f75233028e551cf58d8917)) +## [0.3.1] - 2023-11-28 -* fix: minor bugfix ([`e8870b9`](https://github.com/datajoint/element-array-ephys/commit/e8870b94cf6dc09b251e268c4102fb4b82149da2)) +* Update - Flowchart borders for consistency with other DataJoint Elements +* Fix - `dj.config()` setup moved to `tutorial_pipeline.py` instead of `__init__.py` +* Update - Elements installed directly from GitHub instead of PyPI +* Update - Structure of the tutorial notebook -* fix(probe): better handling of different Neuropixels probe types ([`aaec763`](https://github.com/datajoint/element-array-ephys/commit/aaec76339954b17a2dbef8aeaa84e92e64bdad35)) +## [0.3.0] - 2023-10-25 -* fix(probe_geometry): bugfix in x_coords for probe with staggered electrode positions ([`54d4fac`](https://github.com/datajoint/element-array-ephys/commit/54d4facd38a79ba9b6e40c01174fdb04e6dee43d)) +* Add - DevContainer for codespaces +* Add - `tutorial_pipeline.py` +* Add - 60 min tutorial using Jupyter Notebooks, short demo notebook +* Update - General improvements to increase consistency with other DataJoint Elements -### Unknown +## [0.2.11] - 2023-06-29 -* Merge pull request #199 from ttngu207/main +* Update - Improve kilosort triggering routine - better logging, remove temporary files, robust resumable processing +* Add - Null value for `package_version` to patch bug +* Update - GitHub Actions workflows +* Update - README instructions -fix(probe): better handling of different Neuropixels probe types and SpikeGLX meta loader ([`71d9a42`](https://github.com/datajoint/element-array-ephys/commit/71d9a42b28280d42b021b2e42d492f4918f07cd2)) +## [0.2.10] - 2023-05-26 -* update: version and changelog ([`f754392`](https://github.com/datajoint/element-array-ephys/commit/f75439241693f82e75927d23637a4ae471dd6377)) +* Add - Kilosort, NWB, and DANDI citations +* Fix - CSS to improve readability of tables in 
dark mode +* Update - mkdocs.yaml -* rearrange: explicitly call `probe.create_neuropixels_probe_types()` to create entries in ProbeType ([`46679e6`](https://github.com/datajoint/element-array-ephys/commit/46679e605e116e13a2cc373148ea24127a2fc447)) +## [0.2.9] - 2023-05-11 -* Merge branch 'dev_separated_create_probe' into nei_nienborg ([`7f52f59`](https://github.com/datajoint/element-array-ephys/commit/7f52f594ac4d24b2210cc3e2bee4adf0f4c3c913)) +* Fix - `.ipynb` dark mode output for all notebooks. -* Merge pull request #198 from BrainCOGS/adding-raw-strings-and-package-minimum +## [0.2.8] - 2023-04-28 -Fix regex patterns and add minimum version for scikit-image ([`27c56ea`](https://github.com/datajoint/element-array-ephys/commit/27c56ea92ba0c5d089a2c1e77cbffb52d51dcf6c)) +* Fix - `.ipynb` output in tutorials is not visible in dark mode. -* Added minimum version to the setup.py for scikit-image ([`711dd48`](https://github.com/datajoint/element-array-ephys/commit/711dd48a5396d1e7ba36410c5f141be6940e9c11)) +## [0.2.7] - 2023-04-19 -* Provided raw annotation for strings with unsupported escape regex sequences ([`f59b4ab`](https://github.com/datajoint/element-array-ephys/commit/f59b4abf3f1dae42990ef02cd3ff1e6c341aa861)) +* Bugfix - A name remapping dictionary was added to ensure consistency between the column names of the `metrics.csv` file and the attribute names of the `QualityMetrics` table -* Merge pull request #185 from datajoint/pytest +## [0.2.6] - 2023-04-17 -Add pytest ([`9299142`](https://github.com/datajoint/element-array-ephys/commit/9299142605c4b16c14edfe9a44f40f242f25839a)) +* Fix - Update Pandas DataFrame column name to insert `pt_ratio` in `QualityMetrics.Waveform` table -* apply black formatting ([`333f411`](https://github.com/datajoint/element-array-ephys/commit/333f4118ecbf3eee348fa3671b7da3249302167b)) +## [0.2.5] - 2023-04-12 -* update CHANGELOG.md & bump version 
([`a3426eb`](https://github.com/datajoint/element-array-ephys/commit/a3426ebe9d9b03f61bef231c17032d5ad2e5c8cd)) +* Add - docstrings for quality metric tables +* Fix - docstring errors +* Update - `concepts.md` +* Update - schema diagrams with quality metrics tables -* tested version of pytest suite ([`9c033c4`](https://github.com/datajoint/element-array-ephys/commit/9c033c4f355a831dabb8537cfb12dba76c8badab)) +## [0.2.4] - 2023-03-10 -* switch ephys to no_curation in tutorial notebook ([`3ae2cbc`](https://github.com/datajoint/element-array-ephys/commit/3ae2cbcc558f17968403e00c983eca1049e51721)) +* Update - Requirements with `ipywidgets` and `scikit-image` for plotting widget -* move tutorial_pipeline.py to tests ([`591a0ed`](https://github.com/datajoint/element-array-ephys/commit/591a0ed0857601517a62acd73967c726269e5eb2)) +## [0.2.3] - 2023-02-14 -* setup pytest fixture ([`92026a6`](https://github.com/datajoint/element-array-ephys/commit/92026a614ae61b874d4a4692acc5fc0ad06bd560)) +* Add - extras_require install options for nwb and development requirement sets +* Add - mkdocs notebook rendering +* Add - markdown linting and spellcheck config files, with implementation edits +* Update - license for 2023 +* Update - blackify previous updates -* Merge pull request #183 from MilagrosMarin/main +## [0.2.2] - 2022-01-11 -Minor change: remove pypi release from `release.yaml` ([`cc36465`](https://github.com/datajoint/element-array-ephys/commit/cc36465dc56a9e299a86e54329e806899c6bcf73)) +* Bugfix - Revert import order in `__init__.py` to avoid circular import error. +* Update - `.pre-commit-config.yaml` to disable automatic positioning of import + statement at the top. +* Bugfix - Update docstrings to render API for documentation website. 
-* update version and changelog ([`5cfc269`](https://github.com/datajoint/element-array-ephys/commit/5cfc26921633e89df5fb16637dd61b88361d73d7)) +## [0.2.1] - 2022-01-06 -* remove PyPi release ([`dc7863e`](https://github.com/datajoint/element-array-ephys/commit/dc7863edde2431114db8992cb419782b28eaa3ce)) +* Add - `build_electrode_layouts` function in `probe.py` to compute the electrode layout + for all types of probes. +* Update - parameterize run_CatGT step from parameters retrieved from + `ClusteringParamSet` table +* Update - clustering step, update duration for "median_subtraction" step +* Bugfix - handles single probe recording in "Neuropix-PXI" format +* Update - safeguard in creating/inserting probe types upon probe activation +* Add - quality control metric dashboard +* Update & fix docstrings +* Update - `ephys_report.UnitLevelReport` to add `ephys.ClusterQualityLabel` as a + foreign key reference +* Add - `.pre-commit-config.yaml` -* Merge pull request #182 from datajoint/staging +## [0.2.0] - 2022-10-28 -fix(probe_geometry): bugfix in x_coords for probe with staggered electrode positions ([`95d25f2`](https://github.com/datajoint/element-array-ephys/commit/95d25f2f76e3e8435eb3c8b199437df581aa3916)) +* Add - New schema `ephys_report` to compute and store figures from results +* Add - Widget to display figures +* Add - Add `ephys_no_curation` and routines to trigger spike-sorting analysis + using Kilosort (2.0, 2.5) +* Add - mkdocs for Element Documentation +* Add - New `QualityMetrics` table to store clusters' and waveforms' metrics after the + spike sorting analysis. -* Merge pull request #181 from ttngu207/main +## [0.1.4] - 2022-07-11 -fix(probe_geometry): bugfix in x_coords for probe with staggered electrode positions ([`d65b70f`](https://github.com/datajoint/element-array-ephys/commit/d65b70fd56c89d851b8c29819fe70a219cb81838)) +* Bugfix - Handle case where `spike_depths` data is present. 
-* update(version): update CHANGELOG and version ([`2e79f3d`](https://github.com/datajoint/element-array-ephys/commit/2e79f3d984272ce97709e7c83bb153ab6a2a452a)) +## [0.1.3] - 2022-06-16 -## v0.3.1 (2024-01-04) +* Update - Allow for the `precluster_output_dir` attribute to be nullable when no + pre-clustering is performed. -### Unknown +## [0.1.2] - 2022-06-09 -* Merge pull request #178 from MilagrosMarin/main +* Bugfix - Handle case where `pc_features.npy` does not exist. -Update CHANGELOG and `version.py` ([`6184b2f`](https://github.com/datajoint/element-array-ephys/commit/6184b2fa51db9e430967bbb618b15c0b65549613)) +## [0.1.1] - 2022-06-01 -* CHANGELOG and `version.py` updated ([`bfc0c0a`](https://github.com/datajoint/element-array-ephys/commit/bfc0c0a7e31903cf2f74d5675f06a896e4705769)) +* Add - Secondary attributes to `PreClusterParamSteps` table -* Merge pull request #176 from kushalbakshi/main +## [0.1.0] - 2022-05-26 -Update setup dependencies + tutorial setup + fix diagram ([`0174478`](https://github.com/datajoint/element-array-ephys/commit/01744781b27ee4cad16c18bb2e6a1fea175e038c)) +* Update - Rename module for acute probe insertions from `ephys.py` to `ephys_acute.py`. +* Add - Module for pre-clustering steps (`ephys_precluster.py`), which is built off of + `ephys_acute.py`. +* Add - Module for chronic probe insertions (`ephys_chronic.py`). +* Bugfix - Missing `fileTimeSecs` key in SpikeGLX meta file. +* Update - Move common functions to `element-interface` package. 
+* Add - NWB export function -* Minor fixes and updates to notebook ([`1ea7c89`](https://github.com/datajoint/element-array-ephys/commit/1ea7c89993465eaa34e02864f654c654b9e7285c)) +## [0.1.0b4] - 2021-11-29 -* Fix typo in setup.py ([`b919ca3`](https://github.com/datajoint/element-array-ephys/commit/b919ca34432c3189d934a3a75bdb071fe8bcb6b9)) +* Add - Processing with Kilosort and pyKilosort for Open Ephys and SpikeGLX -* Black formatting ([`16d36d5`](https://github.com/datajoint/element-array-ephys/commit/16d36d56fad9e0e5b97af2fa57df065a664917cf)) +## [0.1.0b0] - 2021-05-07 -* Move dj_config setup to `tutorial_pipeline.py` ([`0dbdde7`](https://github.com/datajoint/element-array-ephys/commit/0dbdde70a27054f5489399daca4743f40c34ce29)) +* Update - First beta release -* Remove PyPI versioning in setup ([`b979fec`](https://github.com/datajoint/element-array-ephys/commit/b979feca44468a33601af36c1db3f917993844df)) +## [0.1.0a5] - 2021-05-05 -* Markdown structural edits ([`b2c4901`](https://github.com/datajoint/element-array-ephys/commit/b2c4901095c6d16b43302bbb4da43a0ad813bc6f)) +* Add - GitHub Action release process +* Add - `probe` and `ephys` elements +* Add - Readers for: `SpikeGLX`, `Open Ephys`, `Kilosort` +* Add - Probe table supporting: Neuropixels probes 1.0 - 3A, 1.0 - 3B, 2.0 - SS, + 2.0 - MS -* Run tutorial notebook ([`43ff0d4`](https://github.com/datajoint/element-array-ephys/commit/43ff0d42a1abe89f6bf3f3f334b32773ca0d2c37)) - -* Minor fixes to README ([`da2239d`](https://github.com/datajoint/element-array-ephys/commit/da2239dbc06dc018f78823c6c36a7f83ff48a5d4)) - -* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`9a8865f`](https://github.com/datajoint/element-array-ephys/commit/9a8865f74ec9d4c1e5b661d04bd3f54932ac53e4)) - -* Update path to flowchart diagram ([`f424a0f`](https://github.com/datajoint/element-array-ephys/commit/f424a0f7726e8dcfafe99b2f053af194d856a536)) - -* Fix heading styling 
([`6323d4e`](https://github.com/datajoint/element-array-ephys/commit/6323d4ee7ac4cd07b494ce93cca8ac159c0bf843)) - -* Minor updates for formatting and structure ([`8717528`](https://github.com/datajoint/element-array-ephys/commit/87175289eb3a0aa1d401bb41f5b9f7e73c62659a)) - -* Added diagram_flowchart.svg ([`bf63fe1`](https://github.com/datajoint/element-array-ephys/commit/bf63fe1b6887075a123de99a5b00fd20f9ee9561)) - -* Added diagram_flowchart.svg ([`4a4104c`](https://github.com/datajoint/element-array-ephys/commit/4a4104c92a370162a4cb51642d337b3567ef28fe)) - -* Merge branch 'datajoint:main' into codespace ([`b66ccee`](https://github.com/datajoint/element-array-ephys/commit/b66cceeda3305c4af75befaaee9e91aa2704bb19)) - -* Update diagram_flowchart.drawio ([`4f58c68`](https://github.com/datajoint/element-array-ephys/commit/4f58c68562820c583ac04fccd8a394e729adafdb)) - -* Merge pull request #175 from A-Baji/main - -revert: :memo: revert docs dark mode cell text color ([`d24f936`](https://github.com/datajoint/element-array-ephys/commit/d24f936b15cfbb138a4490d3cec3fbd1a5a84e69)) - -* revert: :memo: revert table style ([`62172c4`](https://github.com/datajoint/element-array-ephys/commit/62172c459c643e66b559c7c4af1943901a890a89)) - -* Merge pull request #174 from MilagrosMarin/update_tutorial - -Improvements in `element-array-ephys` tutorial and README ([`fe4a844`](https://github.com/datajoint/element-array-ephys/commit/fe4a8444ef213fd70625053ec776d6081ac695c6)) - -* tutorial run with included outputs complete ([`2a59ea0`](https://github.com/datajoint/element-array-ephys/commit/2a59ea0e19ceb11ae43892f7309a119a3ffdfa57)) - -* Tutorial run with included outputs ([`40eec3e`](https://github.com/datajoint/element-array-ephys/commit/40eec3e8441998970e16ebb34f375f3e6647fd8d)) - -* Revert deleting `SessionDirectory` insertion ([`ad69298`](https://github.com/datajoint/element-array-ephys/commit/ad692986c3ba73f0885f401e8114bd66e17c4826)) - -* add markdown in setup 
([`abc82ba`](https://github.com/datajoint/element-array-ephys/commit/abc82ba215d67912482981fe2b2766e7a4bccff8)) - -* ephys tutorial preliminary review to mirror dlc ([`9e16a23`](https://github.com/datajoint/element-array-ephys/commit/9e16a23ff1c8ab7002a013ab4bf4057cd9902253)) - -* Merge pull request #173 from kushalbakshi/codespace - -Add DevContainers + Codespaces tutorial ([`140384e`](https://github.com/datajoint/element-array-ephys/commit/140384ee293a366d03900e490fe03413b7d8531b)) - -* review PR tutorial ([`733d2b1`](https://github.com/datajoint/element-array-ephys/commit/733d2b1ee9702a6d4e391b6ce62e373534ffdd0a)) - -* Fix typo in tutorial heading ([`820b282`](https://github.com/datajoint/element-array-ephys/commit/820b282e29eab4793c56ab99ac29ac897f3bdd33)) - -* Merge branch 'codespace' of https://github.com/kushalbakshi/element-array-ephys into codespace ([`a993b8d`](https://github.com/datajoint/element-array-ephys/commit/a993b8d99a964c1cfb599bc7411f1ff27a0c7c9b)) - -* Updated diagram_flowchart.svg ([`a376c90`](https://github.com/datajoint/element-array-ephys/commit/a376c90a6f495f84af9611b8e8293272892c3c29)) - -* Fix typo ([`d1657b2`](https://github.com/datajoint/element-array-ephys/commit/d1657b2c57c1d8733cb88bce326511679a575fe3)) - -* Update README + minor fixes ([`b9fd4a3`](https://github.com/datajoint/element-array-ephys/commit/b9fd4a35f19587ea6527d968bb6b42e5afa880b2)) - -* Update diagram_flowchart.drawio ([`a08736c`](https://github.com/datajoint/element-array-ephys/commit/a08736cd58b2c43bdeb6e9d0d35b69ce818174f8)) - -* Update diagram_flowchart.svg ([`f9fc3ec`](https://github.com/datajoint/element-array-ephys/commit/f9fc3ec002160fb0bfe36c579019637bd6ab285e)) - -* Updated diagram_flowchart.svg ([`bb2f507`](https://github.com/datajoint/element-array-ephys/commit/bb2f507704b7b15bbe0f887dffee0c98018828e3)) - -* Update diagram_flowchart.drawio ([`6328398`](https://github.com/datajoint/element-array-ephys/commit/632839825214c6c66baa29cd7136bc0bc46f0f3a)) - -* 
Complete demo notebooks ([`21fde13`](https://github.com/datajoint/element-array-ephys/commit/21fde1351084c6f73751dd47f0024c9b9e6487ad)) - -* Black formatting ([`5d57ff2`](https://github.com/datajoint/element-array-ephys/commit/5d57ff2c1d12963391b3318d091a0ac1ca66db6d)) - -* Update demo presentation notebook ([`1da15db`](https://github.com/datajoint/element-array-ephys/commit/1da15dbd1bc9d759fc2c3cf1dd6e1388f5645fad)) - -* Add demo notebooks ([`d02d8a5`](https://github.com/datajoint/element-array-ephys/commit/d02d8a585b575ea60b01caae953df4176a40de01)) - -* Completed tutorial ([`7bf9f9f`](https://github.com/datajoint/element-array-ephys/commit/7bf9f9f3cf80f32963ed421221a4cd405aef6dd8)) - -* Update root_data_dir in Dockerfile ([`d5430aa`](https://github.com/datajoint/element-array-ephys/commit/d5430aa93b507baf4923acda3d3eb8663e480a23)) - -* Update Dockerfile and tutorial_pipeline to fix errors ([`1717054`](https://github.com/datajoint/element-array-ephys/commit/1717054c4eefb1176be2a54bc9e75b80508b62a0)) - -* Use `session_with_datetime` for tutorial ([`ce6e3bf`](https://github.com/datajoint/element-array-ephys/commit/ce6e3bf8ee3968f5b1d1b97e1d7d238272b6c073)) - -* Update `get_logger` to `dj.logger` ([`b2180c4`](https://github.com/datajoint/element-array-ephys/commit/b2180c457e86303ac816bd0acb94c99fb1097821)) - -* Markdown improvements in tutorial ([`38c50fb`](https://github.com/datajoint/element-array-ephys/commit/38c50fbaad8cc2a3d3d717ed4c2d5a577fa908e9)) - -* Upsdate tutorial markdown ([`4190925`](https://github.com/datajoint/element-array-ephys/commit/41909257e34b37c9197943dc75d855c31f9cda89)) - -* Merge branch 'codespace' of https://github.com/kushalbakshi/element-array-ephys into codespace ([`69cef22`](https://github.com/datajoint/element-array-ephys/commit/69cef2204070e258e40e7ef43ba65200be3d560f)) - -* Update `.gitignore` to include Codespaces ([`f5ab71d`](https://github.com/datajoint/element-array-ephys/commit/f5ab71d8abfcfe973d9792e91307ed705d56f54b)) - -* 
Update root data dir env ([`1bea230`](https://github.com/datajoint/element-array-ephys/commit/1bea230d0789be6632c8dbb78139d9a2b8f92421)) - -* Add tutorial notebook ([`caf5c91`](https://github.com/datajoint/element-array-ephys/commit/caf5c9109d43e373c262d3757c0bc3edd54d416f)) - -* Allow build step in docker-compose ([`dcd768a`](https://github.com/datajoint/element-array-ephys/commit/dcd768a0e7bc799b9da968ed8831738d1facbee1)) - -* Black formatting ([`6c6afe4`](https://github.com/datajoint/element-array-ephys/commit/6c6afe4778466b26dbf846c84d1d77daf8672ca7)) - -* Enable devcontainer builds in CICD ([`5e2d7be`](https://github.com/datajoint/element-array-ephys/commit/5e2d7bef950f70007dc418c9975c22b3488c95a1)) - -* First commit for codespace compatability ([`5f756c3`](https://github.com/datajoint/element-array-ephys/commit/5f756c36675e5191f2b19d2581dcf1a4b0991729)) - -* Merge pull request #169 from ttngu207/new_spikeglx_and_probeinterface - -New spikeglx and probeinterface ([`3b8efe5`](https://github.com/datajoint/element-array-ephys/commit/3b8efe52fcc16eae13d918a11dc5c1e89378c93e)) - -* address PR comments - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`f42f1fc`](https://github.com/datajoint/element-array-ephys/commit/f42f1fcff03f0c312d8e09ea50828bf2a77b33b5)) - -* address PR comments - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`452ff31`](https://github.com/datajoint/element-array-ephys/commit/452ff31c952f641f329771c22c39a4e6845d7588)) - -* bugfix ([`4407678`](https://github.com/datajoint/element-array-ephys/commit/44076782dcb86af3309fe2bda909d971f9819266)) - -* bugfix in geomap to shank conversion ([`1514613`](https://github.com/datajoint/element-array-ephys/commit/1514613fb9b74c5be628f2d5e53882ea6f4e7da1)) - -* transform geom map to shank map ([`9857aef`](https://github.com/datajoint/element-array-ephys/commit/9857aef544ebabc84e4906b421420bc2407b55a6)) - -* update spikeglx loader to handle spikeglx ver 20230202 
([`3994fc7`](https://github.com/datajoint/element-array-ephys/commit/3994fc75b10f6d5a92e6c7b664067641716d518e)) - -* incorporate probeinterface and probe geometry for all npx probes ([`224b1c7`](https://github.com/datajoint/element-array-ephys/commit/224b1c7049c9e246df22fe3a46fbd357d3096d8b)) - -## v0.2.11 (2023-06-30) - -### Unknown - -* Merge pull request #165 from kabilar/patch - -Patch issue with nullable attributes ([`e4dd98a`](https://github.com/datajoint/element-array-ephys/commit/e4dd98a3541271e041538daa053f158c1b9f8c63)) - -* Temporarily remove Docker image builds ([`48a1e76`](https://github.com/datajoint/element-array-ephys/commit/48a1e768ad6b8cf05bf519cdcbf0e503aa73e613)) - -* Format with black ([`d5de596`](https://github.com/datajoint/element-array-ephys/commit/d5de59661c21992dbc9104f6ce8ca9c26e64cc91)) - -* Update image path ([`2557877`](https://github.com/datajoint/element-array-ephys/commit/25578773db6a478c54486ebcfc9010d7c23fa87e)) - -* Update text ([`8a764e8`](https://github.com/datajoint/element-array-ephys/commit/8a764e85f0645fb38d2d74f87fdfb73260bb2524)) - -* Update readme ([`c530671`](https://github.com/datajoint/element-array-ephys/commit/c5306715508891428e69203172091664c6d34c7a)) - -* Update changelog ([`d1cf13f`](https://github.com/datajoint/element-array-ephys/commit/d1cf13f8595c0fe6d90a0dd029a65b73d8ecec4a)) - -* Update GitHub Actions ([`71bb8e2`](https://github.com/datajoint/element-array-ephys/commit/71bb8e2a489d044a01c4328027630c5f8f34b6cf)) - -* Update version and changelog ([`d4f7fe0`](https://github.com/datajoint/element-array-ephys/commit/d4f7fe080eb5fe94f518c2db5b17ffef4448dee2)) - -* Add default value ([`01ad1e8`](https://github.com/datajoint/element-array-ephys/commit/01ad1e85a3e2db48c198b2d0d32096152ffba295)) - -* Merge pull request #142 from ttngu207/main - -Update kilosort_triggering.py ([`1d30cb8`](https://github.com/datajoint/element-array-ephys/commit/1d30cb81c258d396aba16a38ed20fbfb0e55a052)) - -* update CHANGELOG 
([`5e1f055`](https://github.com/datajoint/element-array-ephys/commit/5e1f0555349a11b5b51498d13e209a28781c1b11)) - -* Merge branch 'datajoint:main' into main ([`c5f20b0`](https://github.com/datajoint/element-array-ephys/commit/c5f20b0063363e4946ee059838db34c0dc3c57ac)) - -## v0.2.10 (2023-05-26) - -### Unknown - -* Merge pull request #151 from kabilar/main - -Fix readability of tables in dark mode ([`47dea95`](https://github.com/datajoint/element-array-ephys/commit/47dea95466cb771f19807c8e0499efc5e3f2f577)) - -* Update citation ([`100913e`](https://github.com/datajoint/element-array-ephys/commit/100913e772c64bc482fde088842e184972ba479f)) - -* Update changelog ([`0bfca62`](https://github.com/datajoint/element-array-ephys/commit/0bfca629a963196470f4dc291ba1678b58a2829c)) - -* Update CSS ([`15e9ddb`](https://github.com/datajoint/element-array-ephys/commit/15e9ddb4e8fe21f078b623c5eb64131f6255d82a)) - -* Merge pull request #150 from kabilar/main - -Add Kilosort, NWB, and DANDI citations ([`ad9588f`](https://github.com/datajoint/element-array-ephys/commit/ad9588fb1c1293d1b8e598f573b280f06ba4e750)) - -* Add NWB and DANDI citations ([`af81ef9`](https://github.com/datajoint/element-array-ephys/commit/af81ef973859a0eea890d3c6ff513640a9255e16)) - -* Update citation page ([`aee35f7`](https://github.com/datajoint/element-array-ephys/commit/aee35f7918d4d9c66fa97d8e1795b66f43e58996)) - -* Update changelog ([`0ca91fa`](https://github.com/datajoint/element-array-ephys/commit/0ca91fa11ebf11b7dcbe600c628f63fc8c32078c)) - -* Update changelog ([`f89eae4`](https://github.com/datajoint/element-array-ephys/commit/f89eae42128c2ca3f068aecf05d04ca214f40282)) - -* Add plugin ([`4436b05`](https://github.com/datajoint/element-array-ephys/commit/4436b056d3408af20d448068f5ed6d27b8486465)) - -* Remove redirects ([`c798564`](https://github.com/datajoint/element-array-ephys/commit/c798564673444260e2a1094bf2cfd615203283da)) - -* Update changelog 
([`b63031c`](https://github.com/datajoint/element-array-ephys/commit/b63031cf788fe075b410cc23f15d8efc90819895)) - -* Add citation ([`69e76dd`](https://github.com/datajoint/element-array-ephys/commit/69e76dd58e9785f3d4f93e78c3c7b86006e1eae4)) - -* Merge pull request #149 from kushalbakshi/main - -Fix notebook output in dark mode ([`97a57b1`](https://github.com/datajoint/element-array-ephys/commit/97a57b158b116cf8d0b6e84ecc1fe737f3176366)) - -## v0.2.9 (2023-05-11) - -### Unknown - -* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`e4809ba`](https://github.com/datajoint/element-array-ephys/commit/e4809ba249f3885ff578123eba6e479ad672a9f0)) - -* Merge pull request #148 from kushalbakshi/main - -Fix docs tutorials in dark mode ([`96d1187`](https://github.com/datajoint/element-array-ephys/commit/96d118777d5a0f736ea2ca224eb90526d7637616)) - -* Dark mode notebooks fix ([`9aab33d`](https://github.com/datajoint/element-array-ephys/commit/9aab33da951bcddb8515f91ade88aec783631113)) - -## v0.2.8 (2023-04-28) - -### Unknown - -* Fix docs tutorials in dark mode ([`d2367ce`](https://github.com/datajoint/element-array-ephys/commit/d2367ce4e430273ada830d9c6f4eefdde66c2637)) - -* Merge pull request #146 from JaerongA/metrics - -Remap `metrics.csv` column names ([`6aef807`](https://github.com/datajoint/element-array-ephys/commit/6aef8074a16af945c4c6d928bf480daa6e4d1401)) - -## v0.2.7 (2023-04-19) - -### Unknown - -* update changelog and version ([`6b069e6`](https://github.com/datajoint/element-array-ephys/commit/6b069e68efbe933c1324a7afbd10276732c3b49e)) - -* add column name mapping for metrics.csv ([`c97d509`](https://github.com/datajoint/element-array-ephys/commit/c97d5090c445ed6c4f8595cf64dfb59eb965545e)) - -## v0.2.6 (2023-04-18) - -### Unknown - -* Merge pull request #143 from kabilar/main - -Update version and changelog for release ([`5abecc3`](https://github.com/datajoint/element-array-ephys/commit/5abecc3ccf0a3e72e5807e67a1a11d875953450e)) - -* 
Update `ephys_precluster` ([`5993d6e`](https://github.com/datajoint/element-array-ephys/commit/5993d6eeb1e3ab57d1cca96a47e32440170c0477)) - -* Update version and changelog ([`e9b66af`](https://github.com/datajoint/element-array-ephys/commit/e9b66aff50f78715420518ec5470ec1e1435abaf)) - -* Merge `main` of datajoint/element-array-ephys ([`40b5a6d`](https://github.com/datajoint/element-array-ephys/commit/40b5a6df8c884ee36615d3b20fd2f838ac405062)) - -* Merge pull request #144 from JaerongA/main ([`e487f3a`](https://github.com/datajoint/element-array-ephys/commit/e487f3a2083f3c25bbe160eba1ac59d4707e3793)) - -* lowercase all column names in metrics.csv ([`f35ba0b`](https://github.com/datajoint/element-array-ephys/commit/f35ba0b383efb8d59448eb6220a6a4dab153f41d)) - -* Merge pull request #138 from JaerongA/main - -Update docs for quality metrics ([`aabc454`](https://github.com/datajoint/element-array-ephys/commit/aabc45420eaead26966309ecedbfec513e89a771)) - -## v0.2.5 (2023-04-13) - -### Unknown - -* remove schema tag in SkullReference ([`7192958`](https://github.com/datajoint/element-array-ephys/commit/7192958dcb188f6f72c363eefa333786acfd0216)) - -* add a new tag ([`b8ef2d9`](https://github.com/datajoint/element-array-ephys/commit/b8ef2d9d2069cce2e0641ea63dd461b60634a39b)) - -* update schema diagrams to show SkullReference ([`8ffe6df`](https://github.com/datajoint/element-array-ephys/commit/8ffe6dfb9dbe9ee1b421cb05472daf07c1a1428e)) - -* Update CHANGELOG.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`166c00a`](https://github.com/datajoint/element-array-ephys/commit/166c00a20a48805cbc3fe8c3dc300b7c9bd8a7ae)) - -* Update docs/src/concepts.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`4a304d8`](https://github.com/datajoint/element-array-ephys/commit/4a304d88d77d0f7395916881c0110019ce72d803)) - -* bump version ([`7ceae8b`](https://github.com/datajoint/element-array-ephys/commit/7ceae8bac60409d50fd105c9ccd13177ff5f4339)) - -* add schema 
diagrams ([`eb5b0b1`](https://github.com/datajoint/element-array-ephys/commit/eb5b0b10b7efc98c2aa47ebd8c709758bfa6bfea)) - -* add quality_metrics.ipynb to mkdocs ([`977b90a`](https://github.com/datajoint/element-array-ephys/commit/977b90a80dec5034aa78eb90f9810df8b0ff942b)) - -* Update docs/src/concepts.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`27d5742`](https://github.com/datajoint/element-array-ephys/commit/27d57420e6ce7107c1b3b73acb5ac436c0155a4a)) - -* Update element_array_ephys/ephys_report.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`1e37791`](https://github.com/datajoint/element-array-ephys/commit/1e3779182767b699faac78e952a1e4e64f4e2854)) - -* Update requirements.txt - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`46b911b`](https://github.com/datajoint/element-array-ephys/commit/46b911b6df8a49551e5042b5d422135b4585ffc7)) - -* add nbformat to requirements.txt ([`2f38cfa`](https://github.com/datajoint/element-array-ephys/commit/2f38cfa099ab62561c6dbbfdbd6e5ea233de233d)) - -* update requirements.txt ([`eb0d795`](https://github.com/datajoint/element-array-ephys/commit/eb0d7955f53879c511439b77d1f5561a36049e85)) - -* update concepts.md ([`d362792`](https://github.com/datajoint/element-array-ephys/commit/d362792891e9e4c1e3b5f604d070d32cb8e24a23)) - -* fix docstring error in qc.py ([`ea03674`](https://github.com/datajoint/element-array-ephys/commit/ea036742dc28421bad116e897055b867eff6c54a)) - -* add docstring for qc metric tables ([`7a89a7f`](https://github.com/datajoint/element-array-ephys/commit/7a89a7fc5819fa205bf332155f684d9b94529133)) - -* fix docstring error ([`9cc4545`](https://github.com/datajoint/element-array-ephys/commit/9cc4545681ec3da7d0a1b1230c06e12fbdd2e409)) - -* Fix typo ([`e0919ae`](https://github.com/datajoint/element-array-ephys/commit/e0919ae14670b1cce1341d216afe08c990fa1285)) - -* Update docs configuration 
([`2dcca99`](https://github.com/datajoint/element-array-ephys/commit/2dcca99e0ed3d4e1d7ea14b44686f8d6a7f0bd5e)) - -* Fix for `PT_ratio` to `pt_ratio` ([`e4358a5`](https://github.com/datajoint/element-array-ephys/commit/e4358a50c152d940c663b691b74342570f518c30)) - -* Update kilosort_triggering.py ([`8035648`](https://github.com/datajoint/element-array-ephys/commit/8035648551e6545cd66f2f4d5911241226430302)) - -## v0.2.4 (2023-03-10) - -### Unknown - -* Merge pull request #137 from kabilar/main - -Update requirements for plotting widget ([`2fa46bd`](https://github.com/datajoint/element-array-ephys/commit/2fa46bd690c6bf4a22494dc2086c506f5ba48bf8)) - -* Update changelog ([`2eb359f`](https://github.com/datajoint/element-array-ephys/commit/2eb359f354b55235ae350dc89cf4f918449764c2)) - -* Update changelog ([`ac581b9`](https://github.com/datajoint/element-array-ephys/commit/ac581b9eafdb2848aecb2691509038ed9a9c9c13)) - -* Add dependency ([`00f4f6d`](https://github.com/datajoint/element-array-ephys/commit/00f4f6d9693e2c470b46e55341ee54f5c2f36df2)) - -* Add ipywidgets as dependency ([`6840069`](https://github.com/datajoint/element-array-ephys/commit/684006910b979b70fa7c9d9a5614a570da1159d2)) - -## v0.2.3 (2023-02-14) - -### Fix - -* fix: :bug: import from __future__ module - -this supports backward compability with typing ([`4d9ab28`](https://github.com/datajoint/element-array-ephys/commit/4d9ab28609b161be986cda2872778d7a403277f3)) - -### Unknown - -* Merge pull request #132 from CBroz1/main - -Add 0.2.3 release date ([`e39f9d3`](https://github.com/datajoint/element-array-ephys/commit/e39f9d30e23052f4d1bbf85bc7ca459d80e641c2)) - -* Add 0.2.3 release date ([`eaeef30`](https://github.com/datajoint/element-array-ephys/commit/eaeef306827799448050f0e96fa7d2c03ece750b)) - -* Merge pull request #125 from CBroz1/main - -Adjusting pyopenephys requirement for pypi publication ([`c7f92af`](https://github.com/datajoint/element-array-ephys/commit/c7f92af01da6e975bb25e63a472825e8491f1057)) - 
-* New pyopenephys version ([`f07bd44`](https://github.com/datajoint/element-array-ephys/commit/f07bd44abcaa4ac88cded20168b4e8f63b451563)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`905588f`](https://github.com/datajoint/element-array-ephys/commit/905588f5a3f03ae813a4ff25e202aa69b9e47ec7)) - -* Merge pull request #128 from ttngu207/main - -fix removing outdated files/directory ([`53f0312`](https://github.com/datajoint/element-array-ephys/commit/53f0312272ffbf7989c40b2ba91c9e7a33a81faa)) - -* Update kilosort_triggering.py ([`a34e437`](https://github.com/datajoint/element-array-ephys/commit/a34e437380868498975735e97561bb406a0cc70f)) - -* fix removing outdated files/directory ([`6fb65b3`](https://github.com/datajoint/element-array-ephys/commit/6fb65b376c25bd35dd4e665519c20e3ce8f33f4d)) - -* minor improvement ([`7c6dc37`](https://github.com/datajoint/element-array-ephys/commit/7c6dc374184a90e3211cef63bf0a4c31dd7a35fc)) - -* Merge pull request #126 from sidhulyalkar/main - -Fix multiple hash kilosort output issue ([`b619bd0`](https://github.com/datajoint/element-array-ephys/commit/b619bd05d77246808d61627eacaa4e6cad4aa69a)) - -* edit comment ([`f17f299`](https://github.com/datajoint/element-array-ephys/commit/f17f299f8a379b2e6de67c27c51e518d692f0f56)) - -* Fix issue where hash is changed(different paramset) and trying to rerun processing ([`7774492`](https://github.com/datajoint/element-array-ephys/commit/7774492e21fd361560393e5d2bde906adb256e37)) - -* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`152cc58`](https://github.com/datajoint/element-array-ephys/commit/152cc586e294de185aa799e50af9717b4b6948bf)) - -* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`2df6280`](https://github.com/datajoint/element-array-ephys/commit/2df6280b09c36ace534b11726220140afc6d2431)) - -* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys 
([`06568f4`](https://github.com/datajoint/element-array-ephys/commit/06568f445a8bc8da1b0c34eb457899626de75dba)) - -* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`e7f6060`](https://github.com/datajoint/element-array-ephys/commit/e7f6060467f02510558e7b86e9e664c7cbdbc38d)) - -* Merge branch 'run_kilosort' of https://github.com/sidhulyalkar/element-array-ephys ([`4e195c3`](https://github.com/datajoint/element-array-ephys/commit/4e195c3b173cfdb94371311c1d6be6babad7b75c)) - -* Merge branch 'main' of https://github.com/sidhulyalkar/element-array-ephys ([`f5ca7e8`](https://github.com/datajoint/element-array-ephys/commit/f5ca7e87ad8bc51c0801c7b2504e0fb8092a3a08)) - -* Added Code of Conduct ([`195c61e`](https://github.com/datajoint/element-array-ephys/commit/195c61e8e825d90701b84b1c18fa204fa56c8bc3)) - -* Simplify import ([`47f6a07`](https://github.com/datajoint/element-array-ephys/commit/47f6a07ad030dff9997272a4f74abe7962593277)) - -* pyopenephys import workaround ([`2a742e6`](https://github.com/datajoint/element-array-ephys/commit/2a742e694326d2937a07587469730110f1f11b39)) - -* Merge pull request #124 from CBroz1/main - -Cleanup docstrings, add notebook render ([`d5b9586`](https://github.com/datajoint/element-array-ephys/commit/d5b95860977485e1020500d72f2bf18576a18aad)) - -* Apply suggestions from code review - -Co-authored-by: JaerongA <jaerong.ahn@datajoint.com> ([`4796056`](https://github.com/datajoint/element-array-ephys/commit/4796056c1bdb98039cc52e43a4491dc43f6bcfef)) - -* Merge pull request #3 from JaerongA/main - -fix: :bug: import from __future__ to support backward compatibility ([`fd94939`](https://github.com/datajoint/element-array-ephys/commit/fd94939eb518e8727706f3dff56ebc35bb3fcb5f)) - -* Merge branch 'main' into main ([`084ada2`](https://github.com/datajoint/element-array-ephys/commit/084ada258f9935f6d3636fd31c1a962b3be0a9aa)) - -* Adjust dependencies 2 
([`a28cf13`](https://github.com/datajoint/element-array-ephys/commit/a28cf13118f15fbec515171852027965b8c433ad)) - -* Adjust dependencies ([`45f846c`](https://github.com/datajoint/element-array-ephys/commit/45f846cf75004b9e5ea6e6580f6f16209515c6f0)) - -* Fix typing bug ([`888e7f7`](https://github.com/datajoint/element-array-ephys/commit/888e7f743fcfc10dc190c3020523aeb1547c8380)) - -* interface requirement to pip installable ([`9ff2e04`](https://github.com/datajoint/element-array-ephys/commit/9ff2e04cb879fe8aae0fe985a661f7fe8761f79b)) - -* add extras_require nwb install option to docs ([`8045879`](https://github.com/datajoint/element-array-ephys/commit/80458796ebda66e81312211677a0f4aa93100295)) - -* Version bump, changelog ([`9d03350`](https://github.com/datajoint/element-array-ephys/commit/9d0335047fe67e02ebf5ccc6da395d5bdbeda3df)) - -* Add extras_require for dev and nwb ([`e01683c`](https://github.com/datajoint/element-array-ephys/commit/e01683ca2241e457ccfa6f4d61914808400d663e)) - -* Spelling ([`56eb68a`](https://github.com/datajoint/element-array-ephys/commit/56eb68a96f1552aaeed967d4c46ccad45c8eabcd)) - -* Fix docstrings ([`0980242`](https://github.com/datajoint/element-array-ephys/commit/0980242d1502d09612b424ce7b9f06a250d11342)) - -* Add tutorial notebook renders ([`96bb6fa`](https://github.com/datajoint/element-array-ephys/commit/96bb6fa10207782e2d5249eda76007ebc453567d)) - -* More spellcheck; Markdown linting ([`64e7dc6`](https://github.com/datajoint/element-array-ephys/commit/64e7dc690c7cbf1f4051d4cbd3e5d29f3bfd9218)) - -* Update License 2023 ([`4ef0b6d`](https://github.com/datajoint/element-array-ephys/commit/4ef0b6db9a27fd7ee68fcc48744ee98981947156)) - -* Spellcheck pass ([`ea980e9`](https://github.com/datajoint/element-array-ephys/commit/ea980e9d35a7582ff1953651812e61a736931c9d)) - -* Remove unused import ([`b3c0786`](https://github.com/datajoint/element-array-ephys/commit/b3c0786329d6c60c00e092aa0f3a10e920970c20)) - -* blackify 
([`0e5a1c6`](https://github.com/datajoint/element-array-ephys/commit/0e5a1c64d8e7913cdb70ef701c5e53a83225fbcf)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`b250f2d`](https://github.com/datajoint/element-array-ephys/commit/b250f2dd15395876196029789667250cf331a6ee)) - -* Merge branch 'main' of https://github.com/JaerongA/element-array-ephys ([`147550c`](https://github.com/datajoint/element-array-ephys/commit/147550c6fee8a6d510d877be627025f0c710aba8)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`06db0f8`](https://github.com/datajoint/element-array-ephys/commit/06db0f84f2b5a686a3fe87ef14d3be196e7861b3)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`0ec5e3d`](https://github.com/datajoint/element-array-ephys/commit/0ec5e3d135f62013083ce2799db927e23ad1d10e)) - -* Add nwb.py from run_kilosort branch ([`31e46cd`](https://github.com/datajoint/element-array-ephys/commit/31e46cd30f0467f43aca18ca450ec72667656417)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`5711a12`](https://github.com/datajoint/element-array-ephys/commit/5711a12ac2f2ed90abf8a27b9120c017c339090a)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`a8e93d1`](https://github.com/datajoint/element-array-ephys/commit/a8e93d162b1d70f0eb704973141198f750e52f76)) - -## v0.2.2 (2023-01-11) - -### Unknown - -* Merge pull request #122 from JaerongA/main - -Revert import order ([`eababbc`](https://github.com/datajoint/element-array-ephys/commit/eababbc4bb02ecc137641a19163dea8a5c8b6785)) - -* add back deleted version tags ([`bd0e76a`](https://github.com/datajoint/element-array-ephys/commit/bd0e76a685cb143bc631bc03a64df45123d65138)) - -* merge upstream & resolve conflicts ([`1feff92`](https://github.com/datajoint/element-array-ephys/commit/1feff92b61fb473a2c1464ed130fa2fa1b7f58df)) - -* Merge pull request #120 from 
kushalbakshi/main - -Docstring changes for docs API ([`623a381`](https://github.com/datajoint/element-array-ephys/commit/623a38112e6d7404421eece243de1d385faeb663)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`523f2e9`](https://github.com/datajoint/element-array-ephys/commit/523f2e97d3245f60c49f75496beae76d92bc074d)) - -* Update CHANGELOG to resolve merge conflicts ([`e613fc4`](https://github.com/datajoint/element-array-ephys/commit/e613fc45d34c83726d4bf032cb61c402a574c065)) - -* Docstring changes for docs API ([`87a7849`](https://github.com/datajoint/element-array-ephys/commit/87a7849a021ec4076624836a2b5d288448ada909)) - -* update CHANGELOG.md ([`5c7a772`](https://github.com/datajoint/element-array-ephys/commit/5c7a7722b46196a25e3cb85603a3487113dd6592)) - -* revert import order in __init__.py ([`956b96e`](https://github.com/datajoint/element-array-ephys/commit/956b96ec563b17912efe14d0764d6fedfe49f8a5)) - -* Add E402 in pre-commit-config ([`f4f283a`](https://github.com/datajoint/element-array-ephys/commit/f4f283a4209492e55dc82648434ecba55e05acfd)) - -## v0.2.1 (2023-01-09) - -### Chore - -* chore: :loud_sound: update CHANGELOG ([`2d43321`](https://github.com/datajoint/element-array-ephys/commit/2d4332189655eeffff7fd41fa071adcc2754ff16)) - -* chore: :rewind: revert formatting in concepts.md ([`c16b6bd`](https://github.com/datajoint/element-array-ephys/commit/c16b6bdcbadb97fae2b0c21d3dc8c13308fd4131)) - -* chore(deps): :pushpin: unpin plotly ([`8504b97`](https://github.com/datajoint/element-array-ephys/commit/8504b9724a129c617a8264cbbeb1c26c8a696d8e)) - -### Documentation - -* docs: :memo: add | update docstrings ([`4999d64`](https://github.com/datajoint/element-array-ephys/commit/4999d64980e4cf278f872159c7d327387939ee12)) - -* docs: :memo: name change & add docstring ([`d9c75c8`](https://github.com/datajoint/element-array-ephys/commit/d9c75c8ea425eb38dc8e714639ad341edf39cafd)) - -### Refactor - -* refactor: :pencil2: fix typos 
([`efca82e`](https://github.com/datajoint/element-array-ephys/commit/efca82e352adee21c6979e94078c1c82b8b423aa)) - -* refactor(deps): :heavy_minus_sign: remove ibllib deps and add acorr func ([`c613164`](https://github.com/datajoint/element-array-ephys/commit/c613164ae90cac220c1726a9eb2e12f336f876db)) - -### Unknown - -* Merge pull request #116 from JaerongA/ephys_test - -modify build_electrodes function ([`0f518f1`](https://github.com/datajoint/element-array-ephys/commit/0f518f1b0cd60ffeb22a6345cadfbe0e72ecf2b3)) - -* Update element_array_ephys/probe.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`59c72c4`](https://github.com/datajoint/element-array-ephys/commit/59c72c4670abe6f0649eb829af14b289d605b98a)) - -* Update element_array_ephys/probe.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`c983fa8`](https://github.com/datajoint/element-array-ephys/commit/c983fa8967e5c0a20b2a7f7851d5622377ccb16e)) - -* Update element_array_ephys/probe.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`8e53f54`](https://github.com/datajoint/element-array-ephys/commit/8e53f5448b37dee63b678e3f641dcf505149c7ac)) - -* remove redundant type hinting ([`19c447a`](https://github.com/datajoint/element-array-ephys/commit/19c447af3ba77f8330ac7ace59e4e0e9e49fde52)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into ephys_test ([`52a06e9`](https://github.com/datajoint/element-array-ephys/commit/52a06e93b7591a01d7ead857eb96ae53dbbb8cfc)) - -* Merge pull request #118 from JaerongA/main - -Add pre-commit & update docstrings & various fixes ([`61cb23a`](https://github.com/datajoint/element-array-ephys/commit/61cb23a33be65df8d06d2e344548be8d160f25d2)) - -* pull upstream & resolve merge conflicts ([`26b6be9`](https://github.com/datajoint/element-array-ephys/commit/26b6be9788af03fbba53adb31447623d21ee43da)) - -* Merge branch 'datajoint:main' into main 
([`c3ad36f`](https://github.com/datajoint/element-array-ephys/commit/c3ad36f8dfacfb3d58beaaee065c5d75d0a54b28)) - -* Merge pull request #121 from ttngu207/main - -parameterize run_CatGT step from parameters retrieved from `ClusteringParamSet` table ([`24df134`](https://github.com/datajoint/element-array-ephys/commit/24df134629819c17601eec7addf2ae4f359cc567)) - -* Update CHANGELOG.md ([`f5dff5c`](https://github.com/datajoint/element-array-ephys/commit/f5dff5cf04feae0d4c3c142fcc8769bb86d9c0a6)) - -* catGT checks and parameterizable ([`0ade344`](https://github.com/datajoint/element-array-ephys/commit/0ade344dc792ffce2dc25d412ea97a45b135c4d8)) - -* improve validate_file logic ([`63dbd12`](https://github.com/datajoint/element-array-ephys/commit/63dbd12011b1c9978da41664d83f4b36d3a42a19)) - -* update CHANGELOG.md ([`294d4f5`](https://github.com/datajoint/element-array-ephys/commit/294d4f5b8063f32bdc2f82858f10e7b8a0804e0d)) - -* Update element_array_ephys/version.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`b343c15`](https://github.com/datajoint/element-array-ephys/commit/b343c15b20919658b36e3ea0ddb3eef3f82dbf02)) - -* Apply suggestions from code review - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`4651707`](https://github.com/datajoint/element-array-ephys/commit/4651707843f3d78448563bbecc982592b99da035)) - -* Apply suggestions from code review - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`8920b00`](https://github.com/datajoint/element-array-ephys/commit/8920b00b617b2fc1932e0824740f5bd168715d47)) - -* Update CHANGELOG.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`b29e161`](https://github.com/datajoint/element-array-ephys/commit/b29e1613dd25280ab8e6745d2f9110e96287ec9f)) - -* Update CHANGELOG.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2889293`](https://github.com/datajoint/element-array-ephys/commit/28892939ba2b74aaa748ce4458e31f29e74f9f2a)) - -* Apply suggestions from code review 
- -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`7e213c5`](https://github.com/datajoint/element-array-ephys/commit/7e213c5472b8830a0dedc7f2a727470e02bbfa51)) - -* Update setup.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`5649b61`](https://github.com/datajoint/element-array-ephys/commit/5649b61457ef8c74f1e3f3938870edeceea158c3)) - -* ✏️ fix typos ([`dc85370`](https://github.com/datajoint/element-array-ephys/commit/dc853709848062dc146fa21bf5fb1d11a81a4022)) - -* pull from upstream ([`bda30a3`](https://github.com/datajoint/element-array-ephys/commit/bda30a34214ccf47ac0ecd6fb0ab0bc9ff6101c1)) - -* Merge pull request #119 from CBroz1/qc - -Add QC dashboard ([`08d1291`](https://github.com/datajoint/element-array-ephys/commit/08d12914253cc7b4da1170db9dc233ba49cbc283)) - -* Remove interface 42 dependence ([`4f6c301`](https://github.com/datajoint/element-array-ephys/commit/4f6c30199ee2cec36dd86b5ddeb27062955f1f90)) - -* Cleanup ([`eed1eee`](https://github.com/datajoint/element-array-ephys/commit/eed1eeeb3dd23a33764bdde97b8e0f32ae245a8d)) - -* Cleanup, add comments ([`c31c53d`](https://github.com/datajoint/element-array-ephys/commit/c31c53da0f5cfe4a24221bb5cf73886915ff2ffc)) - -* Version bump + changelog ([`9eb34cd`](https://github.com/datajoint/element-array-ephys/commit/9eb34cdca25f271896c2e2c508db1cfb33d82373)) - -* Remove unused import ([`9989dc7`](https://github.com/datajoint/element-array-ephys/commit/9989dc741b8808e284a7eab3aa9c292cb1e1966b)) - -* Revert removed docstring item ([`ddeabc7`](https://github.com/datajoint/element-array-ephys/commit/ddeabc7b9d7ed11ade5ed0249ee5c2b4dc318e41)) - -* Run isort on qc.py ([`a65cbe4`](https://github.com/datajoint/element-array-ephys/commit/a65cbe49ed565c041f3f744c2d1b8eff2f6843a7)) - -* WIP: QC dashboard 2 ([`f997fd6`](https://github.com/datajoint/element-array-ephys/commit/f997fd6c82bddd33f6007dc3bea6ced29ed49bb2)) - -* WIP: QC dashboard 
([`c873acf`](https://github.com/datajoint/element-array-ephys/commit/c873acfe9fc9a91eca2f4e6f45bd5c49a26380d6)) - -* update docstring ([`8561326`](https://github.com/datajoint/element-array-ephys/commit/8561326c3b2d2c97d5eb0a3f28dcd1d5647d4d2f)) - -* Merge pull request #3 from CBroz1/ja - -linter recommended changes, reduce linter exceptions ([`9a78c15`](https://github.com/datajoint/element-array-ephys/commit/9a78c15a5d553e18c19d1038b552a47ae399330b)) - -* Apply isort ([`904ccb9`](https://github.com/datajoint/element-array-ephys/commit/904ccb9ce2454276e005935bfee2d9b9cf0b181f)) - -* Apply hooks except isort ([`5645ebc`](https://github.com/datajoint/element-array-ephys/commit/5645ebc15cd29c005df36ee17cd316824ff50159)) - -* See details - -- Apply black -- Remove trailing whitespace -- Reduce flake8 exceptions -- Move imports to top -- Remove unused imports (e.g., re, pathlib, log_from_json) -- Add __all__ to init files specifying imports loaded -- Add `noqa: C901` selectively where complexity is high -- l -> line for _read_meta func in spikeglx.py -- give version a default value before loading ([`181677e`](https://github.com/datajoint/element-array-ephys/commit/181677e7622e1102679131974cf0fc567604ede1)) - -* fix docstrings ([`47de1d5`](https://github.com/datajoint/element-array-ephys/commit/47de1d5eac6b24608b20f74c9551b6c8040c9bd3)) - -* update concepts.md ([`bc95946`](https://github.com/datajoint/element-array-ephys/commit/bc95946656ebd5100b2e3c584b18b857451b3ab8)) - -* update version.py ([`6a997e1`](https://github.com/datajoint/element-array-ephys/commit/6a997e17aa56298b19d71c915a7116320b5d7ad1)) - -* pre-commit ignores docs, .github ([`e52c12e`](https://github.com/datajoint/element-array-ephys/commit/e52c12ed3509ce145853e709334043b8bd1c3272)) - -* update changelog.md ([`8f4ca3f`](https://github.com/datajoint/element-array-ephys/commit/8f4ca3fc216690a526392617cbc65a634da5f63a)) - -* add to ignored flake8 rules 
([`16de049`](https://github.com/datajoint/element-array-ephys/commit/16de049e86b417895dd38f686a972b4af6df885e)) - -* import DataJointError ([`434e16a`](https://github.com/datajoint/element-array-ephys/commit/434e16a1013b48f057b0f19b7ee51fd63ac0cc36)) - -* update docstring ([`12098ce`](https://github.com/datajoint/element-array-ephys/commit/12098ceb5bb1b004998ff3e59d89a4e5c3b17c80)) - -* add requirements_dev.txt ([`d19f97f`](https://github.com/datajoint/element-array-ephys/commit/d19f97f22a15d609534796ae04257011fe518d35)) - -* add pre-commit-config ([`278e2f2`](https://github.com/datajoint/element-array-ephys/commit/278e2f2aeb462b3313abcfbaf93de2e206332fcb)) - -* Update element_array_ephys/ephys_report.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`b9fb872`](https://github.com/datajoint/element-array-ephys/commit/b9fb872621449710be8409239c1c0379cc59ed0c)) - -* Update element_array_ephys/ephys_report.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`61c739f`](https://github.com/datajoint/element-array-ephys/commit/61c739f0ca0c6cfb74b204919485cf36dc417a8c)) - -* update docstrings ([`816a0ed`](https://github.com/datajoint/element-array-ephys/commit/816a0ed2322cda25e32b8171499b6de327ca9d98)) - -* figures downloaded to temp folder ([`643637d`](https://github.com/datajoint/element-array-ephys/commit/643637d816bb69f4faf792b0c8b31718ec670cfe)) - -* Merge pull request #117 from ttngu207/main - -remove typing - keep consistency and maintain backward compatibility prior to python 3.9 ([`678cd95`](https://github.com/datajoint/element-array-ephys/commit/678cd951928d9e499f81252336dfcc47966e10f5)) - -* remove typing - keep consistency and maintain backward compatibility prior to python 3.9 ([`44a2582`](https://github.com/datajoint/element-array-ephys/commit/44a2582e0dbdb6b820101676de4cad41b6deb9ec)) - -* Merge pull request #114 from ttngu207/main - -Various fixes and improvements - no new feature 
([`3635a4a`](https://github.com/datajoint/element-array-ephys/commit/3635a4abc1cc8fd7d82b0ca6c09ecd9e66a4be5d)) - -* add missing CHANGELOG link ([`c2c3482`](https://github.com/datajoint/element-array-ephys/commit/c2c34828c321ca141e1331db5e7f5da408364ba4)) - -* BLACK formatting ([`cbc3b62`](https://github.com/datajoint/element-array-ephys/commit/cbc3b6286fcb9bd0b13bc803e7739c1b0b9dfd34)) - -* BLACK formatting ([`6b375f9`](https://github.com/datajoint/element-array-ephys/commit/6b375f998089114c051a829d9541d8acfa8a5fbe)) - -* Update CHANGELOG.md ([`72e784b`](https://github.com/datajoint/element-array-ephys/commit/72e784b6a2e202da18024f5026df002768901cbf)) - -* BLACK formatting ([`b6ce2f7`](https://github.com/datajoint/element-array-ephys/commit/b6ce2f7dc5d3fb37f3f277399794c095d58ebc0a)) - -* Merge branch 'datajoint:main' into main ([`731c103`](https://github.com/datajoint/element-array-ephys/commit/731c10313c4b01da9bfb440227d48c4118600dd0)) - -* Merge pull request #115 from iamamutt/main - -Remove ibllib dependency ([`561df39`](https://github.com/datajoint/element-array-ephys/commit/561df399a01a113346b8b2c9619fc7f98b953414)) - -* Merge pull request #1 from JaerongA/ephys_test - -fix module name & add docstrings ([`dd6e215`](https://github.com/datajoint/element-array-ephys/commit/dd6e215df2072aece593d2a8d02e67d3fed3fd47)) - -* Update CHANGELOG.md - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`ed5bbb9`](https://github.com/datajoint/element-array-ephys/commit/ed5bbb9e86d09870a5497a035de7e3eddf84d74c)) - -* update changelog ([`d07a93f`](https://github.com/datajoint/element-array-ephys/commit/d07a93ff5f8ca734847a412cbad4747e4c861383)) - -* bugfix - fix "probe_indices" in single probe recording ([`2676a16`](https://github.com/datajoint/element-array-ephys/commit/2676a161325f18dae13be73849556557a8cea79d)) - -* handles single probe recording in "Neuropix-PXI" format 
([`1859085`](https://github.com/datajoint/element-array-ephys/commit/1859085e7133f5971862e0c74eb1176b83e7e426)) - -* safeguard in creating/inserting probe types upon `probe` activation ([`c2d9f47`](https://github.com/datajoint/element-array-ephys/commit/c2d9f47c0871b16ad82570fdd80ef802011f97b8)) - -* bugfix logging median subtraction duration ([`9ec904f`](https://github.com/datajoint/element-array-ephys/commit/9ec904f6697e4cddca068884613e42bbda092098)) - -* bugfix in updating median-subtraction duration ([`8ec0f71`](https://github.com/datajoint/element-array-ephys/commit/8ec0f713e461678c3288bfec083a0f762e885651)) - -* update duration for `median_subtraction` step ([`bd2ff1c`](https://github.com/datajoint/element-array-ephys/commit/bd2ff1cfe25bbc32c7b5ffc63add008d34bdd655)) - -* update docstring ([`68fa77c`](https://github.com/datajoint/element-array-ephys/commit/68fa77c2ed49c9603bccd64fcb75b92ac5b642e8)) - -* Apply suggestions from code review - -Co-authored-by: Thinh Nguyen <thinh@datajoint.com> ([`cd9501c`](https://github.com/datajoint/element-array-ephys/commit/cd9501c1c773df08f4eea740ef312431e9ec5a1c)) - -* fix docstring & formatting ([`9fc7477`](https://github.com/datajoint/element-array-ephys/commit/9fc7477abc7b975c3435d54a83063a248c50d42e)) - -* fix docstring in probe.py ([`7958727`](https://github.com/datajoint/element-array-ephys/commit/7958727e42bd5e235112fd3d14ef3435d8c6dcc5)) - -* feat: ([`ff3fca0`](https://github.com/datajoint/element-array-ephys/commit/ff3fca0ddc94f3921a06d8593005a2115ccdb930)) - -* remove proj() ([`496c210`](https://github.com/datajoint/element-array-ephys/commit/496c210a14acf6d5de4f3b67049a1af8738579cf)) - -* revert: :adhesive_bandage: revert to um ([`5a7f068`](https://github.com/datajoint/element-array-ephys/commit/5a7f06868a3ddff6616e7010d2cbceae944544aa)) - -* add probe_type in electrode_layouts ([`633f745`](https://github.com/datajoint/element-array-ephys/commit/633f7455c8dba47986b361380af6e73b88595b1b)) - -* spacing defaults to 
none ([`8f6e280`](https://github.com/datajoint/element-array-ephys/commit/8f6e28083d2e132eadef0e3cdb2b625fb43077bf)) - -* Update element_array_ephys/probe.py - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`704f6ed`](https://github.com/datajoint/element-array-ephys/commit/704f6ed65c818037d33617cb424157f0d305fa5f)) - -* remove mapping dict ([`48ab889`](https://github.com/datajoint/element-array-ephys/commit/48ab889c0f3ac48813065d2243346fdf0e23e41d)) - -* col_count_per_shank ([`be3bd11`](https://github.com/datajoint/element-array-ephys/commit/be3bd11f720822565b59d80278f2eaa8b2cfc6cb)) - -* site_count_per_shank ([`cb21f61`](https://github.com/datajoint/element-array-ephys/commit/cb21f6154d737d2b59b2b5fec2690a8a247bad6f)) - -* modify build_electrodes function ([`1c7160c`](https://github.com/datajoint/element-array-ephys/commit/1c7160c33b1e316e49826a58dcf842790a18c94a)) - -* Merge branch 'main' of https://github.com/iamamutt/element-array-ephys into ephys_test ([`8a65635`](https://github.com/datajoint/element-array-ephys/commit/8a65635dc78952e4227ae8c89a168dcc09a2b192)) - -* Merge remote-tracking branch 'upstream/main' ([`b3a07b8`](https://github.com/datajoint/element-array-ephys/commit/b3a07b82d32f2d89d3a72ff4523e46c0518be20c)) - -* Merge pull request #113 from tdincer/main - -Update README.md ([`85a1f0a`](https://github.com/datajoint/element-array-ephys/commit/85a1f0a238102122a0a38aed450648ce8477e4b6)) - -* Update README.md ([`7a5f843`](https://github.com/datajoint/element-array-ephys/commit/7a5f843568f0ebcc86681b8d802b494087d6e520)) - -* Merge branch 'datajoint:main' into main ([`8dd5f29`](https://github.com/datajoint/element-array-ephys/commit/8dd5f29d4ce3070e430a31b8ab1f20cb800742b4)) - -* Merge pull request #111 from kabilar/main - -Fix for cicd and other ux fixes ([`2e63edc`](https://github.com/datajoint/element-array-ephys/commit/2e63edc39de704ed77650f21643d50f54c433214)) - -* Move datajoint diagrams 
([`8f006c0`](https://github.com/datajoint/element-array-ephys/commit/8f006c040fd82c4250e58bce220f064ff640aca2)) - -* Remove empty spaces to compare with ephys modules ([`863d9b1`](https://github.com/datajoint/element-array-ephys/commit/863d9b1152525dbb129c71dab40e2e22183a06d1)) - -* Fix bug ([`8731def`](https://github.com/datajoint/element-array-ephys/commit/8731def1d3edc1ba1f6d1735984c05d93903b35d)) - -* Fix diagram ([`566bc64`](https://github.com/datajoint/element-array-ephys/commit/566bc64d739fb0ec5f62b01887284aa6988f198b)) - -* Update text ([`48900e8`](https://github.com/datajoint/element-array-ephys/commit/48900e87cae10f1daf2d2f8e3e33520354b65f88)) - -* Merge pull request #110 from CBroz1/docs2 - -Add diagram text layer ([`34912bf`](https://github.com/datajoint/element-array-ephys/commit/34912bf062f738c9535042a8d9a55f5f1d3c74a5)) - -* Add diagram text layer ([`638ebc4`](https://github.com/datajoint/element-array-ephys/commit/638ebc4f23b9f1a7a5edc8c039a0e24dac30cc1d)) - -* Merge pull request #109 from CBroz1/docs2 - -Docs2 ([`23bf669`](https://github.com/datajoint/element-array-ephys/commit/23bf66956ca094e053ff3f8b22b612f0e018d6d7)) - -* Update diagram ([`07f0733`](https://github.com/datajoint/element-array-ephys/commit/07f0733ef00c0bf855c886a9571336768b8f51ff)) - -* Add diagram ([`38cc7ab`](https://github.com/datajoint/element-array-ephys/commit/38cc7ab4d0678d60cac03729332f6c548fc4c4fd)) - -* Update logo/styling. 
Hard wrap ([`4d22a16`](https://github.com/datajoint/element-array-ephys/commit/4d22a169094c437b7a80a92c51b92651ec3d5042)) - -* datatype clarification ([`c353400`](https://github.com/datajoint/element-array-ephys/commit/c353400e967e942e81148f60ac77c792ff68eccf)) - -* fix docstring typo ([`50d3dd1`](https://github.com/datajoint/element-array-ephys/commit/50d3dd1fcfe6a4ffd75933e04c5e7cb28564b83e)) - -* Merge pull request #107 from kushalbakshi/main - -Updated CHANGELOG ([`2af7fc5`](https://github.com/datajoint/element-array-ephys/commit/2af7fc55a1f5348694a53c39cc338855f2bf5ae2)) - -## v0.2.0 (2022-11-03) - -### Feature - -* feat: :sparkles: Merge branch 'plotly' into no_curation_plot ([`06c1064`](https://github.com/datajoint/element-array-ephys/commit/06c1064dd890d68afc90a9cc3aca3961edab2691)) - -* feat: :sparkles: add a report schema and plotting png figures ([`66743cc`](https://github.com/datajoint/element-array-ephys/commit/66743cc3dcdc22a35ddd24f7c694278c2903957a)) - -### Fix - -* fix: :bug: use to_plotly_json() instead of to_json() ([`69b2796`](https://github.com/datajoint/element-array-ephys/commit/69b2796285690bcb68d2e3185608a6e33172c0ea)) - -### Unknown - -* Updated CHANGELOG ([`bc5afcc`](https://github.com/datajoint/element-array-ephys/commit/bc5afcc97b23b8032722985158e69b6b01eb34f2)) - -* Merge pull request #102 from kushalbakshi/main - -Added docs + docstrings ([`e04841b`](https://github.com/datajoint/element-array-ephys/commit/e04841b965700551046b29efd98d27577a5c4495)) - -* Update element_array_ephys/ephys_precluster.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`a73fcd2`](https://github.com/datajoint/element-array-ephys/commit/a73fcd2df9ee12860b2d548a45118ac6f5384b51)) - -* Update element_array_ephys/ephys_no_curation.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`36b8161`](https://github.com/datajoint/element-array-ephys/commit/36b8161359cded22c65ca863ebf22327652aeffd)) - -* Update 
element_array_ephys/ephys_chronic.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`e6e0b21`](https://github.com/datajoint/element-array-ephys/commit/e6e0b219924416df1012a3bb474f7c31f0207c61)) - -* Update element_array_ephys/ephys_acute.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`f2d550e`](https://github.com/datajoint/element-array-ephys/commit/f2d550e6903bf3e48802cb705bcccd48d1f9f765)) - -* Version and CHANGELOG 0.2.1 -> 0.2.0 ([`2e0cffe`](https://github.com/datajoint/element-array-ephys/commit/2e0cffefc57a2ef2fd7fcb84c1a41796cd31d7bd)) - -* update CHANGELOG ([`473ca98`](https://github.com/datajoint/element-array-ephys/commit/473ca98f2ecfc351fd17ab5d21bf35d25a269bfb)) - -* Updated CHANGELOG and version ([`8b4f4fa`](https://github.com/datajoint/element-array-ephys/commit/8b4f4fac82b6ccb3d95332d62b1a8b317139f8cc)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`dc52a6e`](https://github.com/datajoint/element-array-ephys/commit/dc52a6eba8944aecc239e254d1b421079213bab0)) - -* Merge pull request #106 from ttngu207/main - -add to changelog, bump version ([`89f1d7c`](https://github.com/datajoint/element-array-ephys/commit/89f1d7c7ffdcf49e52548222554a64de96f3e2ea)) - -* Apply suggestions from code review - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> -Co-authored-by: Tolga Dincer <tolgadincer@gmail.com> ([`8f3e76e`](https://github.com/datajoint/element-array-ephys/commit/8f3e76edf254f18be9235d378ad5f839d50a4a51)) - -* add to changelog, bump version ([`02fb576`](https://github.com/datajoint/element-array-ephys/commit/02fb5765d6f71ba558d2e22fc72e970076a875c8)) - -* Merge pull request #103 from JaerongA/no_curation_plot - -add ephys_report schema for data visualizations ([`3e9b675`](https://github.com/datajoint/element-array-ephys/commit/3e9b675581ed16b2704836a94cf1e55195e35f67)) - -* Update element_array_ephys/plotting/widget.py - -Co-authored-by: Thinh Nguyen <thinh@vathes.com> 
([`d0c6797`](https://github.com/datajoint/element-array-ephys/commit/d0c67970e18e435d3c7e298a0d59d6ce42536bee)) - -* f-string formatting ([`14a970f`](https://github.com/datajoint/element-array-ephys/commit/14a970f2e500288f54b33176905dd7037b85b962)) - -* improve clarity and reduce complexity for activating and using the `ephys_report` module ([`5d41039`](https://github.com/datajoint/element-array-ephys/commit/5d41039c7898ce73fa683de6d426636a6099121a)) - -* Merge branch 'no_curation_plot' of https://github.com/JaerongA/element-array-ephys into no_curation_plot ([`d7fb6df`](https://github.com/datajoint/element-array-ephys/commit/d7fb6dff697ed7fa8f89b422f4621dca67fd5886)) - -* Apply suggestions from code review - -Co-authored-by: Thinh Nguyen <thinh@vathes.com> ([`e773e94`](https://github.com/datajoint/element-array-ephys/commit/e773e94f22b9334b3586c8d7f7083afa699009bf)) - -* plot only the shank with the peak electrode ([`ce9abcc`](https://github.com/datajoint/element-array-ephys/commit/ce9abcc16ad02c0b54e122e4ed3c72cb33b1074c)) - -* new method for getting x, y spacing between sites ([`2eb9540`](https://github.com/datajoint/element-array-ephys/commit/2eb9540a436d08d35f1fd9cd7df205ef3a126afe)) - -* fix code to calculate site spacing & figure reformatting ([`b48aaf0`](https://github.com/datajoint/element-array-ephys/commit/b48aaf078390954ad5be39b29fc84fa6e7db340d)) - -* add vscode in .gitignore ([`48e0744`](https://github.com/datajoint/element-array-ephys/commit/48e07444f0239b6479cb27dc40d59300b11e0691)) - -* fixed typo & black formatting ([`f1d6a87`](https://github.com/datajoint/element-array-ephys/commit/f1d6a87df42d89ed7b33acd299cddcd35c4d1f71)) - -* clean up import & remove wrong documentation ([`dc9b293`](https://github.com/datajoint/element-array-ephys/commit/dc9b2932ea9a0cd9e87c57940d0960a5f84d15bc)) - -* remove zip function ([`357cda9`](https://github.com/datajoint/element-array-ephys/commit/357cda99405b2605dad4d5b76142de8d0c9db6f2)) - -* remove zip function 
([`290feca`](https://github.com/datajoint/element-array-ephys/commit/290fecab3299a32eb1650b7f58090027f6fab87e)) - -* add skip_duplicates=True in probe.Probe.insert ([`dab5dfe`](https://github.com/datajoint/element-array-ephys/commit/dab5dfe0e89f9cc668ce024403839c7b21ae0e0f)) - -* Apply suggestions from code review - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`5d50894`](https://github.com/datajoint/element-array-ephys/commit/5d5089499c9fce8a260b3a51381fa3a254e48ec6)) - -* Update element_array_ephys/ephys_chronic.py - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`35d2044`](https://github.com/datajoint/element-array-ephys/commit/35d2044abe97fbcf3351c01b6088007382a74f8d)) - -* pip plotly version ([`0e86529`](https://github.com/datajoint/element-array-ephys/commit/0e8652921ef7f97c64376dcc3367f3ec1e68cd7c)) - -* widget takes the activated ephys schema as an input ([`e302cf8`](https://github.com/datajoint/element-array-ephys/commit/e302cf87cf98520d467808998ae548ae44580676)) - -* add ephys widget ([`fb48db6`](https://github.com/datajoint/element-array-ephys/commit/fb48db69b07c15e05b3ff9a3ebd2c0e7890a8c58)) - -* update unit & probe widget to be on the same widget ([`a41d7ba`](https://github.com/datajoint/element-array-ephys/commit/a41d7ba6c8567727ef5d3ba9edea48cddcfe525a)) - -* add widget event handler ([`d2b07d5`](https://github.com/datajoint/element-array-ephys/commit/d2b07d531ab2ac91f496b604084d4f5fe7279424)) - -* add ipywidget ([`cdaa931`](https://github.com/datajoint/element-array-ephys/commit/cdaa931804356bd71f7e4642e2e380a5efb19b69)) - -* adjust the figure size ([`00052e5`](https://github.com/datajoint/element-array-ephys/commit/00052e5746351a1194a861c4524d12eb0579e93b)) - -* fix naming convention ([`aab4ead`](https://github.com/datajoint/element-array-ephys/commit/aab4eadb914fa9302615ead69d8ef08545fa8775)) - -* update the probe widget 
([`b3c3f5b`](https://github.com/datajoint/element-array-ephys/commit/b3c3f5bf9249105182c9bea10e392cb05ad1011b)) - -* update dependencies ([`75e15d2`](https://github.com/datajoint/element-array-ephys/commit/75e15d2f3154eeca5e69c95bb5a3961a1c657a5a)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into no_curation_plot ([`5027d17`](https://github.com/datajoint/element-array-ephys/commit/5027d17537c131d6e4959d5fad2c7e1466ca5db3)) - -* resolve circular dependency & reformatting ([`2b69ab5`](https://github.com/datajoint/element-array-ephys/commit/2b69ab53e5415813414c967342f50ed1afb00b30)) - -* add shank in ProbeLevelReport & formatting ([`36ce21f`](https://github.com/datajoint/element-array-ephys/commit/36ce21f3809b2a890d0603a8a054dad77ddf6e1c)) - -* Merge pull request #1 from ttngu207/ephys_report - -resolve "activation" and circular dependency ([`f191214`](https://github.com/datajoint/element-array-ephys/commit/f19121470d02ebe9f0ca77f4e09f9e3bb71ddd8e)) - -* Update element_array_ephys/plotting/unit_level.py ([`36f128a`](https://github.com/datajoint/element-array-ephys/commit/36f128a411853e6e96614aac372b5c392ce307e0)) - -* resolve "activation" and circular dependency ([`4693728`](https://github.com/datajoint/element-array-ephys/commit/46937287c9c4b05efd2f6d121b6ea0142a2aa845)) - -* change report to ephys_report ([`f7ea1b8`](https://github.com/datajoint/element-array-ephys/commit/f7ea1b81281be6c39a39e1e8900c291b439de920)) - -* convert the unit report figures to plotly ([`275b479`](https://github.com/datajoint/element-array-ephys/commit/275b479da06a7ac7b5a188f6d1e57cf39a2bee69)) - -* Merge branch 'main' into no-curation ([`6af6206`](https://github.com/datajoint/element-array-ephys/commit/6af6206e7a14adddbde98288a4e6c460a19516bb)) - -* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`ab7b78c`](https://github.com/datajoint/element-array-ephys/commit/ab7b78c1dea17e173d0365c647066d1a02c2e22f)) - -* Update 
element_array_ephys/ephys_precluster.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`f6b93d7`](https://github.com/datajoint/element-array-ephys/commit/f6b93d75768053636cbc495a1600a19b9535f0b7)) - -* Update element_array_ephys/ephys_no_curation.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`486d938`](https://github.com/datajoint/element-array-ephys/commit/486d938a5b06913927d5eef1c593b0ee8365153f)) - -* Update element_array_ephys/ephys_chronic.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`ab1a067`](https://github.com/datajoint/element-array-ephys/commit/ab1a0675b52d29abd9900d4286041d49e35e953a)) - -* Roadmap updated in concepts.md ([`aec85cc`](https://github.com/datajoint/element-array-ephys/commit/aec85cc35f3a17b217a13012041fcb5c4492e75d)) - -* `enum` attribute description updated ([`4f28ad1`](https://github.com/datajoint/element-array-ephys/commit/4f28ad15a10449d19bcafb98e5b592521ac8769d)) - -* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys ([`c5d4882`](https://github.com/datajoint/element-array-ephys/commit/c5d488239edd0fc3490c41c86108009662aff629)) - -* Update docs/mkdocs.yaml - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`2355a46`](https://github.com/datajoint/element-array-ephys/commit/2355a465c6ee5611c0a8b04339af45d782b48496)) - -* Sentence case in concepts.md ([`72024ea`](https://github.com/datajoint/element-array-ephys/commit/72024ea3ea4de13738244736b710d08cdc00014b)) - -* References added to concepts + version change ([`2528e6d`](https://github.com/datajoint/element-array-ephys/commit/2528e6dea5788f5929b31eebc86930da6c03d44d)) - -* Minor formatting update to docstrings ([`5083b78`](https://github.com/datajoint/element-array-ephys/commit/5083b78b2ad78c852d5bea4747a95a16f5b663e8)) - -* Merge branch 'main' of https://github.com/kushalbakshi/element-array-ephys 
([`b030951`](https://github.com/datajoint/element-array-ephys/commit/b03095182e50e8700a7f78f815db9b7df401ad2e)) - -* Update element_array_ephys/ephys_precluster.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`38f2410`](https://github.com/datajoint/element-array-ephys/commit/38f2410bed0f98de478d7f1795a08e065bedd4eb)) - -* Update element_array_ephys/ephys_precluster.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`ca1441f`](https://github.com/datajoint/element-array-ephys/commit/ca1441f24446c89e63d708deef9318de6f83a543)) - -* Update element_array_ephys/probe.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`98f8a8c`](https://github.com/datajoint/element-array-ephys/commit/98f8a8c2e62a06baaa80e61d634cff7bf25acc0b)) - -* Updated docstrings after code review ([`17d9e4a`](https://github.com/datajoint/element-array-ephys/commit/17d9e4aed104ee047c77557591c187259ef6575a)) - -* Changes applied from code review ([`c75e2d7`](https://github.com/datajoint/element-array-ephys/commit/c75e2d71e485336050ad94a1016bd9af1cc0e432)) - -* Update docs/mkdocs.yaml - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`37d6a60`](https://github.com/datajoint/element-array-ephys/commit/37d6a6098632044474a9c2caae97ece16c4b2c68)) - -* Automated versioning added ([`7431058`](https://github.com/datajoint/element-array-ephys/commit/74310589f076f2f44283c0d3ff3cd46ac0de54e0)) - -* science-team to concepts + hard wrap test ([`b0e755f`](https://github.com/datajoint/element-array-ephys/commit/b0e755f6cc14313eb83e2a2e09524f751d0c5ff0)) - -* Docstrings added ([`0feee75`](https://github.com/datajoint/element-array-ephys/commit/0feee75270d8a4a6e99507937f2aa530bd4b7dcd)) - -* Updated docs based on DLC merge ([`01322db`](https://github.com/datajoint/element-array-ephys/commit/01322db39dc06a540799142788c3173287323347)) - -* Updates mirroring DLC 
([`0111dcd`](https://github.com/datajoint/element-array-ephys/commit/0111dcdf3ab3e7e1d3b95a2e4a8fcc7ac3ca3cef)) - -* Fixes to docs after local testing + docstrings ([`7135ce1`](https://github.com/datajoint/element-array-ephys/commit/7135ce19de7080b5c4fe11f1785f3ce46433c924)) - -* Updated docstrings + Dockerfiles ([`e7423e2`](https://github.com/datajoint/element-array-ephys/commit/e7423e2d330c17a2e68f7aee4f06237495d30fd3)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys ([`afb64eb`](https://github.com/datajoint/element-array-ephys/commit/afb64ebc93d33a3aa06407e7b96f98925f4c5fad)) - -* Merge pull request #94 from datajoint/run_kilosort - -`run_kilosort` -> `main` ([`db75e4d`](https://github.com/datajoint/element-array-ephys/commit/db75e4dbd3770ce54b43d16ed652e338d440eab2)) - -* Merge pull request #97 from ttngu207/no-curation - -pull from main - add QC ([`03e2d5f`](https://github.com/datajoint/element-array-ephys/commit/03e2d5fcfbfa2daa15d2962676903ede80daa497)) - -* specify lfp filepath as input ([`10d12a6`](https://github.com/datajoint/element-array-ephys/commit/10d12a696057ef44da373cbad0029350fc60761e)) - -* smart handling of finished median subtraction step ([`34e59cc`](https://github.com/datajoint/element-array-ephys/commit/34e59cc20ca31472ddba07e190c940ca432b6b99)) - -* modify `extracted_data_directory` path - same as ks output path ([`296f7c6`](https://github.com/datajoint/element-array-ephys/commit/296f7c672296a2502eb84bbc03e7138e42da00bf)) - -* bugfix QC ingestion ([`2d76efc`](https://github.com/datajoint/element-array-ephys/commit/2d76efcccb4f47569a4de9f4c4a2ca215b45146c)) - -* bugfix - remove `%` in attributes' comments ([`d008b05`](https://github.com/datajoint/element-array-ephys/commit/d008b051f0d4752b9582d642e5f948d84386b902)) - -* add QC to `ephys_no_curation` ([`db448f7`](https://github.com/datajoint/element-array-ephys/commit/db448f72a1f7bc091edfd3c29dabadeea71c6d24)) - -* Merge branch 'main' of 
https://github.com/datajoint/element-array-ephys into no-curation ([`ef486ed`](https://github.com/datajoint/element-array-ephys/commit/ef486ed2e686ed1c59f4364d8570dc52c96c2347)) - -* median subtraction on a copied data file ([`32bff24`](https://github.com/datajoint/element-array-ephys/commit/32bff24a9f325673fc8606fee2d916a894000e80)) - -* Merge pull request #93 from CBroz1/rk - -Ensure Path type for get_spikeglx_meta_filepath ([`a738ee7`](https://github.com/datajoint/element-array-ephys/commit/a738ee74a4bd593a43332676ed5bcfa005434319)) - -* Ensure Path type for get_spikeglx_meta_filepath ([`0e94252`](https://github.com/datajoint/element-array-ephys/commit/0e942523d300ac01d56ae8419cb62e696d3abae2)) - -* Merge pull request #91 from ttngu207/no-curation - -bugfix for catgt ([`6757ef7`](https://github.com/datajoint/element-array-ephys/commit/6757ef738b95ad8c7cc7e305a7a9a0f96de42fe9)) - -* Update kilosort_triggering.py ([`71d87ae`](https://github.com/datajoint/element-array-ephys/commit/71d87aecfe37d6a435f5cc819ee389ed621d9772)) - -* Merge pull request #90 from ttngu207/no-curation - -enable `CatGT` ([`23ca7ca`](https://github.com/datajoint/element-array-ephys/commit/23ca7ca6d2474daab71d8a00c4c492d4395a2667)) - -* improve error handling ([`f60ba3d`](https://github.com/datajoint/element-array-ephys/commit/f60ba3d6a0c0776d1f7b97d02b9d1608a5788a0e)) - -* bugfix - path search for catgt output ([`c33d1b0`](https://github.com/datajoint/element-array-ephys/commit/c33d1b02811dbaf7e492da60e70819d5102798ff)) - -* `missing_ok` arg only available in python 3.8+ ([`35da39b`](https://github.com/datajoint/element-array-ephys/commit/35da39bb1c5c80c18ade40fed296cfb47b158123)) - -* bugfix ([`fbdbe24`](https://github.com/datajoint/element-array-ephys/commit/fbdbe24adc55c6fb51f4e524760528f27589f37b)) - -* bugfix ([`aed42ca`](https://github.com/datajoint/element-array-ephys/commit/aed42ca1dddd15a6b2a03c329f9278122846c55b)) - -* enable catgt run 
([`987231b`](https://github.com/datajoint/element-array-ephys/commit/987231be7d386351a043901d19f55f76b6bbf90d)) - -* bugfix in running catgt ([`5905392`](https://github.com/datajoint/element-array-ephys/commit/59053923a9968df6f2ab7f90d9324c7502dcaf0e)) - -* Update kilosort_triggering.py ([`f9f18d0`](https://github.com/datajoint/element-array-ephys/commit/f9f18d0864615f24420b3feb982b68dfd85dd6d7)) - -* Merge pull request #89 from ttngu207/no-curation - -implement data compression using `mtscomp` for openephys and spikeglx for neuropixels data ([`c153e7f`](https://github.com/datajoint/element-array-ephys/commit/c153e7f4f0875a08f097e6af653bb6e8248c5c08)) - -* garbage collect openephys objects ([`d8aea04`](https://github.com/datajoint/element-array-ephys/commit/d8aea041f35ba2b92e8f619f4b3729123852116a)) - -* garbage collect openephys objects ([`97f3d21`](https://github.com/datajoint/element-array-ephys/commit/97f3d21ce0ae6e5827070d093893ab836665998a)) - -* implement data compression using `mtscomp` for openephys and spikeglx neuropixels data ([`b2bd0ee`](https://github.com/datajoint/element-array-ephys/commit/b2bd0eeab31a63d95fcaf84aaafb436289da8838)) - -* Merge pull request #88 from ttngu207/no-curation - -overall code cleanup/improvement for more robust and optimal kilosort run ([`ad8436e`](https://github.com/datajoint/element-array-ephys/commit/ad8436e8535ab34fdb24efea7e0aa9bc5d2d6178)) - -* Merge branch 'no-curation' of https://github.com/ttngu207/element-array-ephys into no-curation ([`fd331bd`](https://github.com/datajoint/element-array-ephys/commit/fd331bdefc036eb9e08fad83b8ffba41dc037ec7)) - -* remove space escaping character ([`b71b459`](https://github.com/datajoint/element-array-ephys/commit/b71b459744b212251d0685b7bebb82d859fc8723)) - -* improve kilosort calls, handle spaces in paths ([`0c77826`](https://github.com/datajoint/element-array-ephys/commit/0c77826af1141d0e2d5828736252b33e56734af5)) - -* improve error message 
([`a3c5c2f`](https://github.com/datajoint/element-array-ephys/commit/a3c5c2fb9c03e3b6df293ed0e8fb58f17a20ef78)) - -* bugfix, match new implementation for openephys ([`3f1ee37`](https://github.com/datajoint/element-array-ephys/commit/3f1ee371bec5c68a2c9838082df87b6368074ebd)) - -* code cleanup, minor bugfix ([`b97566e`](https://github.com/datajoint/element-array-ephys/commit/b97566e6b833d610377e93cd21a08a0272f3a075)) - -* improve logic for running kilosort modules in a resumable fashion ([`9a59e57`](https://github.com/datajoint/element-array-ephys/commit/9a59e574dd25176a8da9b8142d6e87aeed3c5f74)) - -* Merge pull request #86 from CBroz1/rk - -Changes for codebook deployment ([`d9c3887`](https://github.com/datajoint/element-array-ephys/commit/d9c38873e3371dd045bb90e199a0a58caa5e701b)) - -* WIP: version bump pynwb to 2.0 ([`0221848`](https://github.com/datajoint/element-array-ephys/commit/0221848b6466daba6553e1e1a5967a2dee08c954)) - -* Merge branch 'run_kilosort' of https://github.com/datajoint/element-array-ephys into rk ([`13d74ad`](https://github.com/datajoint/element-array-ephys/commit/13d74ad73efac3cd5f94f1ffe374c762a95c936d)) - -* Merge pull request #77 from ttngu207/no-curation - -more robust loading of openephys format ([`d298b07`](https://github.com/datajoint/element-array-ephys/commit/d298b07c0ba7805a5efeee2d9db703cc35913925)) - -* more robust loading of openephys format ([`67039ac`](https://github.com/datajoint/element-array-ephys/commit/67039ac51bd87754a610053bf8e85d0a958f42be)) - -* Merge pull request #73 from ttngu207/no-curation - -update openephys loader - handling open ephys v0.6.0 ([`9272ee6`](https://github.com/datajoint/element-array-ephys/commit/9272ee64417248b7afe85786ae60366710c0f0ff)) - -* added loading of electrode location for new openephys format ([`4e367d7`](https://github.com/datajoint/element-array-ephys/commit/4e367d72dc9928b69553613844773ad38c98ea91)) - -* update open ephys loader to handle "STREAM" in latest format 
([`07604e2`](https://github.com/datajoint/element-array-ephys/commit/07604e24c185421566122024b3d8b8c3f60b4475)) - -* Merge pull request #70 from ttngu207/no-curation - -bugfix for LFP electrode mapping ([`747c15f`](https://github.com/datajoint/element-array-ephys/commit/747c15f5b481790e6e36131dfee8ab932eeb6220)) - -* bugfix for LFP electrode mapping ([`f11e016`](https://github.com/datajoint/element-array-ephys/commit/f11e0161e949bc572f0ac7ee2e0b46096fa00351)) - -* `kilosort2` also as part of the `contents` for ClusteringMethod ([`f4b917d`](https://github.com/datajoint/element-array-ephys/commit/f4b917d0eada17b3c54c4922ef2295f368569855)) - -* Update requirements.txt ([`a578d85`](https://github.com/datajoint/element-array-ephys/commit/a578d851db1cc439b6c1bc380bdb6ee6d6af4789)) - -* Add contact info to Code of Conduct ([`70e0b1c`](https://github.com/datajoint/element-array-ephys/commit/70e0b1c3f18d7e150957e5ad943a9f3c76d130e3)) - -* Add Code of Conduct ([`e43e5d5`](https://github.com/datajoint/element-array-ephys/commit/e43e5d5b88ca82c1be14b7392c56397a9162b7c6)) - -* Issue #63 ([`d102f6f`](https://github.com/datajoint/element-array-ephys/commit/d102f6fa5bb79c8ddf23e3c6df4e200cb3c02a25)) - -* Merge branch 'rk' of https://github.com/CBroz1/element-array-ephys into rk ([`bd6d7e4`](https://github.com/datajoint/element-array-ephys/commit/bd6d7e471ea322e4675dbf23bd253e53ac057ca1)) - -* Update README.md - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`720e355`](https://github.com/datajoint/element-array-ephys/commit/720e355c9201f79d1638a15099e325ef0dda76cd)) - -* Issue #11 ([`4a3e0bf`](https://github.com/datajoint/element-array-ephys/commit/4a3e0bf0332abcab167e7fc65c99de3f98f39e2f)) - -* WIP: nwb line length, Readme mention of ([`e1c9b35`](https://github.com/datajoint/element-array-ephys/commit/e1c9b355d1270acd6ccd144106b173e68c0e6654)) - -* WIP: NWB fix - prevent SQL err by restricting key 
([`b62fd12`](https://github.com/datajoint/element-array-ephys/commit/b62fd12f5d94ccd70aa5d816ac5c2320b7d8520d)) - -* WIP: nwb bugfix ([`49bba8a`](https://github.com/datajoint/element-array-ephys/commit/49bba8a2f03fd1e0a74910bf360dc1b68308f261)) - -* Merge pull request #69 from ttngu207/no-curation - -Add no-curation version and run kilosort analysis ([`364f80e`](https://github.com/datajoint/element-array-ephys/commit/364f80ed261e6297c20b14a3915ba29b6beb7cb4)) - -* Merge remote-tracking branch 'upstream/run_kilosort' into no-curation ([`ddd4095`](https://github.com/datajoint/element-array-ephys/commit/ddd409543cc531ef11c1961fb50ffc1a5a516772)) - -* Merge branch 'no-curation' of https://github.com/ttngu207/element-array-ephys into no-curation ([`7fecff1`](https://github.com/datajoint/element-array-ephys/commit/7fecff10fd4b530af00e1e408b723fab93787fd4)) - -* Apply suggestions from code review - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`af864d7`](https://github.com/datajoint/element-array-ephys/commit/af864d73d5925458d0955d6c0556366202ad88ee)) - -* added assertion - safeguard against failed loading of continuous.dat ([`47babf3`](https://github.com/datajoint/element-array-ephys/commit/47babf37c9556d1d496cd4d7687baea7d57cd7eb)) - -* handles new probe naming in latest Open Ephys format ([`cd5fe70`](https://github.com/datajoint/element-array-ephys/commit/cd5fe70261b48142f4406087eacdde96c141de99)) - -* update openephys loader - handle new open ephys format ([`11a12ba`](https://github.com/datajoint/element-array-ephys/commit/11a12ba00a169618d510f967132ab738f24151e4)) - -* Update openephys.py ([`85c7c8b`](https://github.com/datajoint/element-array-ephys/commit/85c7c8ba12154fff8695b1a66d9db96440e8bb08)) - -* configurable `paramset_idx` for auto ClusteringTask generation ([`39c8579`](https://github.com/datajoint/element-array-ephys/commit/39c8579821aec505fffaf40c8e41d833ec9f775f)) - -* bugfix 
([`769de13`](https://github.com/datajoint/element-array-ephys/commit/769de136cfd91e3ae57228d657af405a302ebeaf)) - -* Update __init__.py ([`e0a9a4f`](https://github.com/datajoint/element-array-ephys/commit/e0a9a4f38fe74e790e1d1c70cf3b7ab1d68f8f8a)) - -* delete nwb export - rename `ephys` module -> `ephys_acute` ([`c2f8aea`](https://github.com/datajoint/element-array-ephys/commit/c2f8aeaa7aad48c602484b9358a569d2b54d69c4)) - -* remove unintended prototyping work ([`83649f5`](https://github.com/datajoint/element-array-ephys/commit/83649f5edf2ddb4788e5b361ec7153238ecbfee4)) - -* handle older open ephys format for single probe ([`bdcfa46`](https://github.com/datajoint/element-array-ephys/commit/bdcfa46ea7d1d55af48b22556b9943c9d44b5fff)) - -* Update requirements.txt ([`f0b3d4a`](https://github.com/datajoint/element-array-ephys/commit/f0b3d4a7b6c77c6bb295ebb558458caacaa1543a)) - -* Update requirements.txt ([`7320f9f`](https://github.com/datajoint/element-array-ephys/commit/7320f9f6b966548145b4a39b5bbf87b0d5c8d6e3)) - -* Update requirements.txt ([`cb1a041`](https://github.com/datajoint/element-array-ephys/commit/cb1a0419d8707808b1f1e0599358be6c42a00bd2)) - -* rename `sess_dir` -> `session_dir` ([`03cab02`](https://github.com/datajoint/element-array-ephys/commit/03cab02ee709e94b151621d00b12e455953dccfb)) - -* Apply suggestions from code review - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`f4052cc`](https://github.com/datajoint/element-array-ephys/commit/f4052cc5d079e6ea065ce6f6aab34b462250ba39)) - -* name tweak ([`779b2fb`](https://github.com/datajoint/element-array-ephys/commit/779b2fb95b277768cb15046a4eea4e7138e23749)) - -* minor bugfix ([`d66368c`](https://github.com/datajoint/element-array-ephys/commit/d66368c7f016e244b4a81c1aa2f6b7d7bbc4d15d)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into no-curation ([`b4832ea`](https://github.com/datajoint/element-array-ephys/commit/b4832ea82416a7d76675811b2936c062185514f8)) - -* 
DEPRECATING NWB EXPORT ([`4951b39`](https://github.com/datajoint/element-array-ephys/commit/4951b396a1b97a9dda3aacdc974c23f64d8bfd9f)) - -* Merge pull request #4 from A-Baji/no-curation - -all three ephys files up to date ([`9dd6b42`](https://github.com/datajoint/element-array-ephys/commit/9dd6b42fc77e18089a419e10457013c86380db0c)) - -* applied requested changes ([`c56cd18`](https://github.com/datajoint/element-array-ephys/commit/c56cd188357f84da89c72c56d74b6bdf6913f11e)) - -* all three ephys files up to date ([`f2881ca`](https://github.com/datajoint/element-array-ephys/commit/f2881ca0d2c61dfcdeff9794f6fe26b1ecb6a066)) - -* Merge pull request #3 from ttngu207/nwb-export - -bugfix in assigning unit electrode indices ([`f9a4754`](https://github.com/datajoint/element-array-ephys/commit/f9a4754355fe556c6a7ae28f068556568a9966a1)) - -* bugfix in assigning unit electrode indices ([`31bba8c`](https://github.com/datajoint/element-array-ephys/commit/31bba8ca4061f6225ea111649929a0e2eb942615)) - -* include probe as part of the electrode_group name for uniqueness ([`aa47c8a`](https://github.com/datajoint/element-array-ephys/commit/aa47c8a761e1149fd386a1e2ca910670f24e18b7)) - -* version bump ([`8657d58`](https://github.com/datajoint/element-array-ephys/commit/8657d58557cc755292d1be4638b4f8bf3c4af1ed)) - -* fix NWB export - null `waveforms` - back to version 0.1.0b1 ([`dae36d1`](https://github.com/datajoint/element-array-ephys/commit/dae36d1f4793047b1a1ac684fc0f37f529d1c9aa)) - -* version bump ([`6107e8e`](https://github.com/datajoint/element-array-ephys/commit/6107e8e22f29b858a2ef139ea5890b7cfa86b80b)) - -* NWB export fix, specifying which ephys module ([`8dff08b`](https://github.com/datajoint/element-array-ephys/commit/8dff08b5c094c0867cea242738f8a9e1ffbca6ba)) - -* handles multi-probe for older OE version ([`02c4b67`](https://github.com/datajoint/element-array-ephys/commit/02c4b671c2f58aaf19a966af266d2e56d25f8a86)) - -* openephys loader - handles signalchain and processor as 
single element or list ([`2022e91`](https://github.com/datajoint/element-array-ephys/commit/2022e91079ecb021b5a3b0cc0771631206692c9c)) - -* for pykilosort's probe, provide both Nchan and NchanTOT ([`1c39568`](https://github.com/datajoint/element-array-ephys/commit/1c39568045b65ac4de3597e3d539311efdb033c1)) - -* handle missing `sample_rate` from pykilosort params.py ([`142459d`](https://github.com/datajoint/element-array-ephys/commit/142459d6f21d096b1490e301cfbbeb22ac370e6c)) - -* bugfix in triggering pykilosort ([`02069c9`](https://github.com/datajoint/element-array-ephys/commit/02069c94b8b088a2f16b58f5c7224f17a3920cd5)) - -* clusters extraction - check `cluster_group.tsv` and `cluster_KSLabel.tsv` ([`da10c66`](https://github.com/datajoint/element-array-ephys/commit/da10c66caf6a99f0a0f63f89c94136f5470983c7)) - -* handles extraction of `connected` channels for NP_PROBE format in OpenEphys ([`43d6614`](https://github.com/datajoint/element-array-ephys/commit/43d6614f8b4a36aebcb81ed49500d890f35de1f7)) - -* bugfix, timedelta as seconds ([`adffe34`](https://github.com/datajoint/element-array-ephys/commit/adffe34ea52ed2f0550a2063b843b72d67d90ef2)) - -* bugfix - extract recording datetime (instead of using experiment datetime) ([`c213325`](https://github.com/datajoint/element-array-ephys/commit/c21332543e23f647e0198e9eba0881f256e85a87)) - -* bugfix openephys loader ([`0d16e7e`](https://github.com/datajoint/element-array-ephys/commit/0d16e7ed61ec7911d97db7077b6f11498709cf73)) - -* search recording channels for Open Ephys based on channel names ([`d105419`](https://github.com/datajoint/element-array-ephys/commit/d1054195a63608f041a62dd49d9134fae80dc89b)) - -* bugfix in electrode sites design for Neuropixels UHD probe ([`f55a6a7`](https://github.com/datajoint/element-array-ephys/commit/f55a6a7dfd373efe1405aeea050a0f8fe0b9e6f8)) - -* supporting `neuropixels UHD` in `ephys.EphysRecording` 
([`db3027b`](https://github.com/datajoint/element-array-ephys/commit/db3027b7b48eca7e281cfa93c23b546545457ed4)) - -* handles format differences between npx1 vs 3A ([`e325a30`](https://github.com/datajoint/element-array-ephys/commit/e325a30d1ab4879077d59d773b50cb05998168bd)) - -* fix package requirement formatting error ([`af2b18b`](https://github.com/datajoint/element-array-ephys/commit/af2b18ba88287f234ab0ade5c76210a57eed718b)) - -* update openephys loader ([`4250220`](https://github.com/datajoint/element-array-ephys/commit/4250220c7933f47135208c31a5d0e6c46a2d8518)) - -* minor bugfix in running pykilosort ([`b6f8f99`](https://github.com/datajoint/element-array-ephys/commit/b6f8f99dbc8b2bc56ca2f6484d1f8f09f8056944)) - -* using fork of pyopenephys ([`96931a4`](https://github.com/datajoint/element-array-ephys/commit/96931a4fdfcaebcee2853dac3835e7fdf954524f)) - -* use_C_waves=False for OpenEphys ([`81d99c8`](https://github.com/datajoint/element-array-ephys/commit/81d99c8c4c6901daa23d01e19e760d3b2d737a6f)) - -* first prototype for pykilosort ([`819ff19`](https://github.com/datajoint/element-array-ephys/commit/819ff193f326e950e6f06dc4ed4785a4ba96be0b)) - -* triggering kilosort analysis for open-ephys ([`df599fb`](https://github.com/datajoint/element-array-ephys/commit/df599fbe88e0ffc4694c33479c1247c22f66760e)) - -* add `neuropixels UHD` probe type ([`ddc3b94`](https://github.com/datajoint/element-array-ephys/commit/ddc3b9429b53ea6d7e5889171884522e7a05dbad)) - -* specify additional recording-info as part of the `params` ([`58b5984`](https://github.com/datajoint/element-array-ephys/commit/58b598473ad6c83c8966176d484a0f23c8056a6b)) - -* bugfix for running kilosort for Open Ephys data ([`199a2ba`](https://github.com/datajoint/element-array-ephys/commit/199a2baf43eadc1028961991173d5b010d31bc39)) - -* first prototype for running the ecephys_pipeline with OpenEphys ([`49ca0be`](https://github.com/datajoint/element-array-ephys/commit/49ca0beded17dd3d613b498223cbced3ce5480f1)) - 
-* add nwb export to `no-curation` ephys ([`b25f065`](https://github.com/datajoint/element-array-ephys/commit/b25f065f64735727ddae3e5e6ef907ba7368bfb9)) - -* Merge pull request #2 from ttngu207/nwb-export - -Nwb export ([`3ebdf23`](https://github.com/datajoint/element-array-ephys/commit/3ebdf236cb6f20cf0458f21b4358ca5b8b13c958)) - -* Update nwb.py ([`19616ef`](https://github.com/datajoint/element-array-ephys/commit/19616ef695d2546bfd441b32ea0df4a668488392)) - -* handle NWB export with multiple curated clusterings from one session ([`d07f830`](https://github.com/datajoint/element-array-ephys/commit/d07f830dc9384193164919399fb57605b3ea96c7)) - -* added NWB export ([`f740aef`](https://github.com/datajoint/element-array-ephys/commit/f740aef79c0b87a0e3b951c58e36daf701705195)) - -* minor bugfix ([`09c1e60`](https://github.com/datajoint/element-array-ephys/commit/09c1e6072dc681898e5edf9e8e866e9519ac76bd)) - -* stylistic improvements, addressing code review comments ([`e8ffe17`](https://github.com/datajoint/element-array-ephys/commit/e8ffe17711ad66bbf5011aef9bcff3f7ed2afe76)) - -* Apply suggestions from code review - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`74e3ead`](https://github.com/datajoint/element-array-ephys/commit/74e3eadc0c722bef43901f075434142314604077)) - -* check `ap.bin` file validity before triggering kilosort (based on filesize) ([`beaf765`](https://github.com/datajoint/element-array-ephys/commit/beaf7651cffa67e1bc8a10b19cad49dde8e6530e)) - -* duration for each module run ([`19b704b`](https://github.com/datajoint/element-array-ephys/commit/19b704b70af25ff8e3b40d28ac1858748007c9bb)) - -* bugfix logging for kilosort triggering ([`f34e95d`](https://github.com/datajoint/element-array-ephys/commit/f34e95dcda3ba8379cfe9e860277d69f1336db37)) - -* minor bugfix ([`55bec01`](https://github.com/datajoint/element-array-ephys/commit/55bec0122f077bbb4b8ac90516da6d0a78dc8630)) - -* stage tracking and resumable kilosort run 
([`408532c`](https://github.com/datajoint/element-array-ephys/commit/408532cf9b685b9a57f59831ba3fd16d0982ea97)) - -* minor cleanup ([`dc7ddd9`](https://github.com/datajoint/element-array-ephys/commit/dc7ddd912de849b8b63335bf33e700a630d117db)) - -* improve clusteringtask and waveform ingestion routine ([`c2ee64f`](https://github.com/datajoint/element-array-ephys/commit/c2ee64f52e6ce1be7062584dbd90129bae6cb891)) - -* new version 0.1.0b1 ([`67341d8`](https://github.com/datajoint/element-array-ephys/commit/67341d8f5470622e6c4e58a0b7f3ae3989c281a7)) - -* Update kilosort.py ([`0f0c212`](https://github.com/datajoint/element-array-ephys/commit/0f0c21249de2c81cef480df219f3c005a62f3b78)) - -* bugfix - no dir created ([`044c389`](https://github.com/datajoint/element-array-ephys/commit/044c389fae07540621a36af6568a35199cb3006a)) - -* add debugging lines ([`b9f4e92`](https://github.com/datajoint/element-array-ephys/commit/b9f4e9208b80f113b70f66d46fd6d4424dde4ec0)) - -* log the folder creation ([`ae966aa`](https://github.com/datajoint/element-array-ephys/commit/ae966aa9f55d7b835995c9d5a2b1e39f03e3f4ea)) - -* bugfix, convert path to string ([`94aade7`](https://github.com/datajoint/element-array-ephys/commit/94aade7212775ccc275979e05d17cb195bb665de)) - -* bugfix ([`28c4452`](https://github.com/datajoint/element-array-ephys/commit/28c445279f310822260c5df78f906c7bf77a3764)) - -* updating `kilosort_repository` depending on which KSVer to be used ([`38c5be6`](https://github.com/datajoint/element-array-ephys/commit/38c5be6fd8d1d1225c61c28eecabaf311e605694)) - -* include `clustering_method` into the calculation of `param_set_hash` ([`acdab12`](https://github.com/datajoint/element-array-ephys/commit/acdab125acfc62792fa4fa18ad0ba403d16a5da8)) - -* make variable naming consistent ([`a0ea9f7`](https://github.com/datajoint/element-array-ephys/commit/a0ea9f70c8dec59c9d415bd9de2e219ea69d0e81)) - -* add kilosort 2.5 as default content 
([`a6cae12`](https://github.com/datajoint/element-array-ephys/commit/a6cae1291534e929c451d422da7083241f3418f9)) - -* minor bugfix ([`69c5e51`](https://github.com/datajoint/element-array-ephys/commit/69c5e5144c4210bd2248c354b56c6ba1bc4f6a47)) - -* change default `noise_template_use_rf` to False ([`c593baf`](https://github.com/datajoint/element-array-ephys/commit/c593bafbac84334c1e388fe96817494085878aed)) - -* missing generate module json ([`375e437`](https://github.com/datajoint/element-array-ephys/commit/375e437861d33791147d2913a8ea94d8031c12d6)) - -* bugfix ([`d63561f`](https://github.com/datajoint/element-array-ephys/commit/d63561f74ff7dbf1bc87922f74b5f55ad0bd5cd6)) - -* handle cases where `fileTimeSecs` is not available ([`6788180`](https://github.com/datajoint/element-array-ephys/commit/6788180682f8d2ff4ee3bdc0a6a01dd61814c67f)) - -* bugfix in triggering ecephys_spike_sorting ([`6bf0eb1`](https://github.com/datajoint/element-array-ephys/commit/6bf0eb100e0e5480e8824644ff1c3b638e889c24)) - -* minor tweak/improvements in kilosort triggering ([`f699ce7`](https://github.com/datajoint/element-array-ephys/commit/f699ce7e3af7c387579f133c763046f6e55517f4)) - -* Update kilosort_trigger.py ([`dd01fd2`](https://github.com/datajoint/element-array-ephys/commit/dd01fd2158d3f09f314e705b2c08f5e1b4205085)) - -* flag to create spike sorting output dir ([`6c646bb`](https://github.com/datajoint/element-array-ephys/commit/6c646bbcc2437f00c8de4f99aa1d6738c5acd09f)) - -* fix missing `clustering_method` ([`5cdc994`](https://github.com/datajoint/element-array-ephys/commit/5cdc994f47387f7935c028a1fd38e59e7d4c31e3)) - -* handles a weird windows/unix path incompatibility (even with pathlib) ([`ba28637`](https://github.com/datajoint/element-array-ephys/commit/ba28637496fbea77207a81fae3e6a287c56b494a)) - -* Merge branch 'no-curation' of https://github.com/ttngu207/element-array-ephys into no-curation 
([`a24bd1a`](https://github.com/datajoint/element-array-ephys/commit/a24bd1a700d7d8fed61c6c0f5e51c2482cbc5bbf)) - -* Apply suggestions from code review - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`149ef3f`](https://github.com/datajoint/element-array-ephys/commit/149ef3f4ab5294399d0553acec05b00b8d79226b)) - -* typo fix ([`9f98195`](https://github.com/datajoint/element-array-ephys/commit/9f981951d9222af132a51a85267a2c278f893f27)) - -* minor stylistic improvements ([`a9326ea`](https://github.com/datajoint/element-array-ephys/commit/a9326eaa015c46875829dbf92d6494aa5c0a3d85)) - -* remove `_required_packages_paths` ([`60105da`](https://github.com/datajoint/element-array-ephys/commit/60105da78fcc5e55be961c7c79fdb206b072de26)) - -* triggering Kilosort with ecephys_spike_sorting package ([`047bfa1`](https://github.com/datajoint/element-array-ephys/commit/047bfa1cd33fadefe42e9f395f119c97e894d1d1)) - -* automate ClusteringTask insertion ([`0d56456`](https://github.com/datajoint/element-array-ephys/commit/0d5645668f18d8b9011e46817bd0f31dda73a088)) - -* bugfix ([`a7e7554`](https://github.com/datajoint/element-array-ephys/commit/a7e755481215b67917b9678c5c795634db775c03)) - -* Update ephys_no_curation.py ([`70e93b3`](https://github.com/datajoint/element-array-ephys/commit/70e93b381c1f5fcb45b01a647b5be0fc4fbbbdf0)) - -* using `find_full_path` for session dir as well - improve robustness ([`5420ae0`](https://github.com/datajoint/element-array-ephys/commit/5420ae05ddf63c56e729f4229547fe5df99b3d58)) - -* no-curation, store processed data in user-specified `processed_data_dir` if provided ([`4397dd7`](https://github.com/datajoint/element-array-ephys/commit/4397dd7217b4595dc7fef498e6db456373ce50df)) - -* helper for `ProbeInsertion` - `auto_generate_entries(session_key)` ([`de84ce0`](https://github.com/datajoint/element-array-ephys/commit/de84ce0529c13eed5d1c1199062663f6a3888af2)) - -* improve kilosort loading routine - add `validate()` method 
([`b7c0845`](https://github.com/datajoint/element-array-ephys/commit/b7c0845bc0514f3f435d79f6ba6fff86693b166a)) - -* minor bug fix ([`adfad95`](https://github.com/datajoint/element-array-ephys/commit/adfad9528d17689714d11b1ac1710d6f1a74756a)) - -* make `clustering_output_dir` user-input optional, auto infer ([`590310e`](https://github.com/datajoint/element-array-ephys/commit/590310ea8fc0829e60fd1113d99e83e75c78142d)) - -* remove `Curation` ([`a39a9b1`](https://github.com/datajoint/element-array-ephys/commit/a39a9b1b456c5b6fd49c2faa43f5d551e5f7901c)) - -* copied `ephys` to `ephys-no-curation`, added `recording_duration`, make ([`042cc46`](https://github.com/datajoint/element-array-ephys/commit/042cc460f48429e3b7c20eb01d49861d01357192)) - -* Update README ([`cdb9182`](https://github.com/datajoint/element-array-ephys/commit/cdb9182880dcca7f1070a2e2554d513488911cb2)) - -* Populated .md files ([`d8fca5b`](https://github.com/datajoint/element-array-ephys/commit/d8fca5bac61e8aacd3a24cffe8c6641e76512a05)) - -* Merge pull request #96 from ttngu207/main - -bugfix - remove % in attributes' comments AND add QC to `ephys_precluster` ([`4a6bc31`](https://github.com/datajoint/element-array-ephys/commit/4a6bc31e026de1869292e018efc5a817d01969e5)) - -* add QC to `ephys_precluster` ([`e21302f`](https://github.com/datajoint/element-array-ephys/commit/e21302f5a4c80b27aa4e66bf51e99f33153c4ebf)) - -* bugfix - remove `%` in attributes' comments ([`57a1c1d`](https://github.com/datajoint/element-array-ephys/commit/57a1c1d7d067f683e321656f669ad1c14be25fbe)) - -* Merge pull request #87 from ttngu207/main - -QC metrics ([`54c8413`](https://github.com/datajoint/element-array-ephys/commit/54c84137bbe55841b0e0db6079e0259d5240390f)) - -* Update CHANGELOG.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`7ce35ab`](https://github.com/datajoint/element-array-ephys/commit/7ce35ab8f47de621d5c0df3831c86c050a13b886)) - -* Apply suggestions from code review - -Co-authored-by: Dimitri 
Yatsenko <dimitri@datajoint.com> ([`100cf84`](https://github.com/datajoint/element-array-ephys/commit/100cf84cbb5a8e59ae98b57b63aa7ec789669a34)) - -* apply PR review's suggestions ([`0a94aa9`](https://github.com/datajoint/element-array-ephys/commit/0a94aa9b2519e8168b297bcb7f050ed3894774e3)) - -* Merge branch 'datajoint:main' into main ([`b126801`](https://github.com/datajoint/element-array-ephys/commit/b126801d6bc876465e47fcb10f7fc19aeb215183)) - -* bump version, add to CHANGELOG ([`c250857`](https://github.com/datajoint/element-array-ephys/commit/c25085792607a23021607d5c35bbde7d2890a32f)) - -* Apply suggestions from code review ([`35e8193`](https://github.com/datajoint/element-array-ephys/commit/35e8193fbc3d199fbf99932623bbafaa371fca12)) - -* code cleanup ([`7f948f7`](https://github.com/datajoint/element-array-ephys/commit/7f948f70e19b13f32d9e6ce6c178f34d6a5db665)) - -* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys ([`65554dc`](https://github.com/datajoint/element-array-ephys/commit/65554dc51486b1154c0f9bb844e6dfe18e2774cd)) - -* Merge branch 'datajoint:main' into main ([`d884f01`](https://github.com/datajoint/element-array-ephys/commit/d884f01349358c82a9cc15414b7ba660f9fa2bad)) - -* add QC metrics ([`1773e23`](https://github.com/datajoint/element-array-ephys/commit/1773e23329852c96c8ba92c96144ea07e2b07036)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`4ab2a6c`](https://github.com/datajoint/element-array-ephys/commit/4ab2a6c9544e653efc50380c6117ca8f30facc5e)) - -* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into main ([`b29b6b0`](https://github.com/datajoint/element-array-ephys/commit/b29b6b0ba0f715e00b378fbaf25e0af4e94373aa)) - -* Merge pull request #95 from guzman-raphael/docs - -Documentation revamp ([`efedd37`](https://github.com/datajoint/element-array-ephys/commit/efedd37c5bb4798ad7ba7975d0506490e440f9f6)) - -* Update with recent styling updates. 
([`c73c7b5`](https://github.com/datajoint/element-array-ephys/commit/c73c7b59f3ad1103673e7e30b0becaa49f7c3a9e)) - -* Remove unneeded comment. ([`0a8a67c`](https://github.com/datajoint/element-array-ephys/commit/0a8a67cf9c44b54c06d16293d1f64c5da755d1a0)) - -* Upgrade documentation to latest design and automation. ([`a304d31`](https://github.com/datajoint/element-array-ephys/commit/a304d316d3fb0df4e186c68b9a20ffcfd4e80457)) - -* Create u24_element_release_call.yml ([`3b98067`](https://github.com/datajoint/element-array-ephys/commit/3b98067cbdd4a617f6c7de4b21ed475d953866dc)) - -* Create u24_element_tag_to_release.yml ([`ddd6a18`](https://github.com/datajoint/element-array-ephys/commit/ddd6a189bc787dcf4e8174cb10e1e762ff912598)) - -* Create u24_element_before_release.yml ([`ac9035d`](https://github.com/datajoint/element-array-ephys/commit/ac9035dfb6dea9d24aecf11487794b00ca0c8b12)) - -* Merge pull request #84 from A-Baji/documentation - -Create documentation template ([`22c95a8`](https://github.com/datajoint/element-array-ephys/commit/22c95a8dee0ac264ed990d8a785b8b1e25e4ef41)) - -* code review updates ([`87b4e40`](https://github.com/datajoint/element-array-ephys/commit/87b4e40c839f8e888d2c31a789c4297289ac4582)) - -* update deploy-docs wf condition ([`9ff6089`](https://github.com/datajoint/element-array-ephys/commit/9ff6089e361156cec3184709c5d72fee4582efc2)) - -* enable wf ([`9b56f01`](https://github.com/datajoint/element-array-ephys/commit/9b56f0144d0c1d9035e2be94cd52e24e11bab477)) - -* fix logos ([`4a61ff2`](https://github.com/datajoint/element-array-ephys/commit/4a61ff294ace685edc71618cf5c0bf034ad11e4d)) - -* enable wf ([`6e0b460`](https://github.com/datajoint/element-array-ephys/commit/6e0b460055d60bad9c68a649f07ee928d54d5dee)) - -* disable wf ([`25c4876`](https://github.com/datajoint/element-array-ephys/commit/25c4876a6ee0205fd6387eaeff88e71d28e23ded)) - -* add permalink, fix mobile color, update logo
([`9ef29f2`](https://github.com/datajoint/element-array-ephys/commit/9ef29f26bfa8687d2f3c651e1368d50c1b27212f)) - -* github actions ([`3e3a943`](https://github.com/datajoint/element-array-ephys/commit/3e3a94353219989be282f70b68c22c53af4481b6)) - -* fix landing page nav link ([`fdbe144`](https://github.com/datajoint/element-array-ephys/commit/fdbe1441235894a4b21697555004c2b577af0ce1)) - -* open link in new tab example ([`d8a1607`](https://github.com/datajoint/element-array-ephys/commit/d8a1607fba5c3244185db51cb593314706f75eac)) - -* update override ([`7cfd746`](https://github.com/datajoint/element-array-ephys/commit/7cfd74643f751d38525f77d17524fd5664a85e10)) - -* fix navigation override ([`88c5ed1`](https://github.com/datajoint/element-array-ephys/commit/88c5ed1037e6d5a156e038383975e68d0ec8d18b)) - -* cleanup navigation layout ([`d0e7abb`](https://github.com/datajoint/element-array-ephys/commit/d0e7abbfe476aa45edecdb0c50573274ab44d0c6)) - -* add datajoint social link ([`052e589`](https://github.com/datajoint/element-array-ephys/commit/052e589ee403e0435b53d32f3c4f498b736f191c)) - -* change light mode footer color ([`111fcf5`](https://github.com/datajoint/element-array-ephys/commit/111fcf51899c4ee6602e308067a6a9e3f566d48b)) - -* re enable wfs ([`3c2226d`](https://github.com/datajoint/element-array-ephys/commit/3c2226d89f8aff13c0c5f0bef196944b80694ef0)) - -* disable wfs ([`7def7f3`](https://github.com/datajoint/element-array-ephys/commit/7def7f3364d1472a09fd9bec74a74eb2b7758d89)) - -* change dark theme ([`2dca304`](https://github.com/datajoint/element-array-ephys/commit/2dca304aacf1ba89f464703c3fed390438ad74fb)) - -* dark mode tweak ([`26a2818`](https://github.com/datajoint/element-array-ephys/commit/26a2818e4885750928761a2d2db0884e5bdad4ab)) - -* update source block dark mode color ([`0c889e2`](https://github.com/datajoint/element-array-ephys/commit/0c889e2e8a9183ce48cad4c22654239d92e7dba4)) - -* tweak docstring example 
([`a0e223a`](https://github.com/datajoint/element-array-ephys/commit/a0e223aec75c626d05786657b4f2b45b1b31a9d9)) - -* add social links to footer ([`656242e`](https://github.com/datajoint/element-array-ephys/commit/656242ebddfaccae726d664fa2b4e4c7b6603f0f)) - -* landing page nav link ([`b5b8bab`](https://github.com/datajoint/element-array-ephys/commit/b5b8bab6d362e37d796dfb5531a65d9cb35b0eb6)) - -* re enable wf ([`109965b`](https://github.com/datajoint/element-array-ephys/commit/109965b504047147203647815b1c87f07d043c62)) - -* disable other wf ([`a7b3d11`](https://github.com/datajoint/element-array-ephys/commit/a7b3d116c969875c20adcd1e33142f34e67f133e)) - -* apply suggestions from review ([`9f5753c`](https://github.com/datajoint/element-array-ephys/commit/9f5753c1deba2ea7ac21cdfb08be63031b2d258b)) - -* disable mike install ([`d1ee89b`](https://github.com/datajoint/element-array-ephys/commit/d1ee89b8e097a521d45647d7413c1c635b1ed6ae)) - -* re enable other gh action wf ([`9b70fa1`](https://github.com/datajoint/element-array-ephys/commit/9b70fa1d7103b340dda95d9b3b21da7c8a540679)) - -* disable other gh action wf ([`f1b1868`](https://github.com/datajoint/element-array-ephys/commit/f1b186835cb0bf1a7e98f731efb3ff42d336c4b0)) - -* move docker files to docs/ ([`33d6fcc`](https://github.com/datajoint/element-array-ephys/commit/33d6fcc39b52e7da1e698945fbd991bf8660f434)) - -* comment cleanup ([`5a936af`](https://github.com/datajoint/element-array-ephys/commit/5a936afd88b006948fb89cc39e625429139e3db2)) - -* add mike workflow example ([`03b6dc3`](https://github.com/datajoint/element-array-ephys/commit/03b6dc395c8168aac8f616daf59971c38872685d)) - -* add mike for future use ([`6f7eedf`](https://github.com/datajoint/element-array-ephys/commit/6f7eedf4ca16b065e6e7b2e3ec6c061ba6b8608e)) - -* re enable other wf jobs ([`9bdb418`](https://github.com/datajoint/element-array-ephys/commit/9bdb4187d8a9c72f8c414147a37875200d72b2cc)) - -* add missing dependencies 
([`5c44e80`](https://github.com/datajoint/element-array-ephys/commit/5c44e8018882d66b7f73c32ecac7fe6202737ed0)) - -* add config file path ([`00d3ca2`](https://github.com/datajoint/element-array-ephys/commit/00d3ca22e2fa6634fb62ac94c5958c233a218528)) - -* disable other jobs ([`5469e77`](https://github.com/datajoint/element-array-ephys/commit/5469e77a48483c4870c42721c89802d536044ef7)) - -* docker and github wf ([`730614b`](https://github.com/datajoint/element-array-ephys/commit/730614b8feb1b966e2150813ad9339f13a1a9bff)) - -* small change ([`f40c188`](https://github.com/datajoint/element-array-ephys/commit/f40c1887bbcc1b6e1111258d35659709eb7f4cc8)) - -* move docs to src ([`33224a9`](https://github.com/datajoint/element-array-ephys/commit/33224a9087ad988fb0b4429deca4d385017d3043)) - -* cleanup ([`4399034`](https://github.com/datajoint/element-array-ephys/commit/4399034e9ae66ae2cbcd6df998f0c65466147c16)) - -* clean up and tweak dark mode theme ([`72e3aa6`](https://github.com/datajoint/element-array-ephys/commit/72e3aa6c29646351a602908696b84b7b852d31d7)) - -* tweak dark mode theme for codeblocks ([`95e3925`](https://github.com/datajoint/element-array-ephys/commit/95e3925d85a1d1da5122bba160c89afe6c70f49c)) - -* docstring example ([`c4d3bde`](https://github.com/datajoint/element-array-ephys/commit/c4d3bde2fb9c4e4bb5f9981468c360808428fe0a)) - -* light and dark themes ([`b1f7399`](https://github.com/datajoint/element-array-ephys/commit/b1f7399984bc3f3c1707b43723c342ca6b8cd42e)) - -* dj light theme ([`724d870`](https://github.com/datajoint/element-array-ephys/commit/724d870489c9346044bc305065622dee04a05f0e)) - -* set up mkdocs ([`f2a5e7c`](https://github.com/datajoint/element-array-ephys/commit/f2a5e7c0bcd7c85f83a535a53a739cda4e81e026)) - -## v0.1.4 (2022-07-11) - -### Unknown - -* Merge pull request #83 from kabilar/main - -Fix for `spike_depths` attribute ([`ee0e179`](https://github.com/datajoint/element-array-ephys/commit/ee0e179d0ed02212f03a36382a07409b6ba2f823)) - -* 
Update changelog ([`a97dd3c`](https://github.com/datajoint/element-array-ephys/commit/a97dd3c4fd468832721d6bedc3b796a89f01b3b9)) - -* Fix if statement ([`c66ff8f`](https://github.com/datajoint/element-array-ephys/commit/c66ff8f7311768f6916b859319570a4d267a423f)) - -* Update changelog ([`0da5e91`](https://github.com/datajoint/element-array-ephys/commit/0da5e915a97f6c9b1ed5d5f8c8b7c551def38440)) - -* Update changelog and version ([`1865be6`](https://github.com/datajoint/element-array-ephys/commit/1865be641b4a62b87acb8b5a68c0ceb8914aede8)) - -* Fix for truth value of array ([`787d33d`](https://github.com/datajoint/element-array-ephys/commit/787d33d6ce478976a2c2e49d72fe11be90b5782f)) - -## v0.1.3 (2022-06-16) - -### Unknown - -* Merge pull request #79 from kabilar/main - -Update `precluster_output_dir` to nullable ([`ecd6a4c`](https://github.com/datajoint/element-array-ephys/commit/ecd6a4c0212ebb54dd1d256a384aa0b8bf7785f7)) - -* Set precluster_output_dir to nullable ([`90f3ed1`](https://github.com/datajoint/element-array-ephys/commit/90f3ed177587dc364e9b2548afb515809b549ec8)) - -## v0.1.2 (2022-06-09) - -### Unknown - -* Merge pull request #78 from kabilar/main - -Fix for case where `pc_features.npy` does not exist ([`a01530c`](https://github.com/datajoint/element-array-ephys/commit/a01530ca2216787f2b69906f596a4b785323cf50)) - -* Fix format ([`6b6f448`](https://github.com/datajoint/element-array-ephys/commit/6b6f448c9b3ca88d3106b37bb5a7bb474ce4d157)) - -* Update element_array_ephys/ephys_chronic.py ([`558e0b9`](https://github.com/datajoint/element-array-ephys/commit/558e0b94537e0d3b3c9c3d83823e9ff8a9212c57)) - -* Update element_array_ephys/ephys_acute.py ([`44dbe8c`](https://github.com/datajoint/element-array-ephys/commit/44dbe8cc84bf009bee1abdf22adc118ee6564457)) - -* Update element_array_ephys/readers/kilosort.py - -Co-authored-by: Thinh Nguyen <thinh@vathes.com> 
([`a392e57`](https://github.com/datajoint/element-array-ephys/commit/a392e57868ec9d9b356cf3c1a6e57b0dc33fbb1b)) - -* Update element_array_ephys/ephys_precluster.py - -Co-authored-by: Thinh Nguyen <thinh@vathes.com> ([`b3922fc`](https://github.com/datajoint/element-array-ephys/commit/b3922fc58e213b52ca0481c45ca0bcc7a01d1e1c)) - -* Update version and changelog ([`3a2671a`](https://github.com/datajoint/element-array-ephys/commit/3a2671a1b4d4dff344ac3431357482a4ce5c270c)) - -* Handle case where pc_features does not exist ([`c16fda2`](https://github.com/datajoint/element-array-ephys/commit/c16fda209410974116eea0bc893eb8542ca2afa0)) - -* Flatten channel map ([`cdce624`](https://github.com/datajoint/element-array-ephys/commit/cdce624300b20272d5662f0fdb7ec20d436148e1)) - -* Handle case where pc_features does not exist ([`c428e47`](https://github.com/datajoint/element-array-ephys/commit/c428e47c17a69fd0812cf4ad224db0ccff0ca036)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`d53f7a9`](https://github.com/datajoint/element-array-ephys/commit/d53f7a9228dc03c86c0ccb8392f39bc8d67d3b40)) - -## v0.1.1 (2022-06-01) - -### Unknown - -* Merge pull request #72 from kabilar/main - -Add attributes to describe parameter list ([`a20ab9b`](https://github.com/datajoint/element-array-ephys/commit/a20ab9b7879e8cf4131206bf3dbb099d557b0233)) - -* Merge branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`d618c55`](https://github.com/datajoint/element-array-ephys/commit/d618c5577f951791cc646ba6617847058b65516f)) - -* Update CHANGELOG.md ([`81e1643`](https://github.com/datajoint/element-array-ephys/commit/81e164399caf1ee14141890a02c904d23530d6d5)) - -* Update element_array_ephys/ephys_precluster.py ([`34a544e`](https://github.com/datajoint/element-array-ephys/commit/34a544e08509edf46962b0cf0c3753477100e732)) - -* Set spike_depths as nullable attribute 
([`4142468`](https://github.com/datajoint/element-array-ephys/commit/4142468ba4f83fd95da6a6f988e95f97b5907555)) - -* Update length ([`2ce12c1`](https://github.com/datajoint/element-array-ephys/commit/2ce12c1cfed6e4e983dd19cfcb5f0d021f49c845)) - -* Update diff ([`8366b60`](https://github.com/datajoint/element-array-ephys/commit/8366b60c641b15af5aa40720248564fd606e4bd5)) - -* Update version and changelog ([`ad9a4b9`](https://github.com/datajoint/element-array-ephys/commit/ad9a4b97d741a72fc18c093c5806f9a732dac54b)) - -* Add description attribute ([`08fb06a`](https://github.com/datajoint/element-array-ephys/commit/08fb06af3ad45be1c8dcf1576303d3c085f8593e)) - -* Merge pull request #65 from kabilar/main - -Add `ephys_precluster` module ([`3eeae51`](https://github.com/datajoint/element-array-ephys/commit/3eeae51bd34570c95dcec945eb1b55771edeb902)) - -* Add ephys_chronic image ([`c82c23b`](https://github.com/datajoint/element-array-ephys/commit/c82c23b72e169b0bb5b2cc3057b5af6323e052d6)) - -* Add precluster image ([`a31abba`](https://github.com/datajoint/element-array-ephys/commit/a31abba2ef5f5bf8d49449dda4a20286765d8a87)) - -* Raise error ([`92e30ee`](https://github.com/datajoint/element-array-ephys/commit/92e30ee03194ea5cf1eeba076c324f73c9b7ebf6)) - -* Merge branch 'main' of kabilar/element-array-ephys ([`cd31e0b`](https://github.com/datajoint/element-array-ephys/commit/cd31e0b292ffda20cc0d0f6096591fc7e1350329)) - -* Update element_array_ephys/ephys_precluster.py ([`5bbb727`](https://github.com/datajoint/element-array-ephys/commit/5bbb727d4ccccdab1add41c21ad505fddde94ebe)) - -* Update name ([`3df0981`](https://github.com/datajoint/element-array-ephys/commit/3df0981f7011d6871bf3772f4ba98917e89cf80b)) - -* Update changelog ([`bbe9f3f`](https://github.com/datajoint/element-array-ephys/commit/bbe9f3f445efb35f287405469b6226d0bd4a2f7e)) - -* Update version ([`44c86bf`](https://github.com/datajoint/element-array-ephys/commit/44c86bfdf665f71946375775d5b1ac12323b08d5)) - -* Merge 
branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`2bd2234`](https://github.com/datajoint/element-array-ephys/commit/2bd2234030f477205108054ec70dc195e5ebae8c)) - -* Update element_array_ephys/ephys_precluster.py ([`dc0fc1f`](https://github.com/datajoint/element-array-ephys/commit/dc0fc1f50aa1d950d89cb8b355e992a3fdcb3125)) - -* Update element_array_ephys/ephys_precluster.py ([`f2baf12`](https://github.com/datajoint/element-array-ephys/commit/f2baf12fe2d9a6417f2d63674563570bb72453af)) - -* Update element_array_ephys/ephys_precluster.py ([`ec0ebf2`](https://github.com/datajoint/element-array-ephys/commit/ec0ebf206dcfff530b0d9f3c2c7dfa50ef1d66f3)) - -* Update element_array_ephys/ephys_precluster.py ([`8d793ac`](https://github.com/datajoint/element-array-ephys/commit/8d793ac82d3be366d4a07900c399d39210ff2ad0)) - -* Add documentation for ephys modules ([`644a114`](https://github.com/datajoint/element-array-ephys/commit/644a114b72e8ae4efc332291721d3987aa22a007)) - -* Rename image ([`91950b0`](https://github.com/datajoint/element-array-ephys/commit/91950b0b1d88d92563ec5ee02245602e0c022480)) - -* Merge 'main' of datajoint/element-array-ephys ([`1b60995`](https://github.com/datajoint/element-array-ephys/commit/1b60995453afae074d2b32d33dabbf06044b1dad)) - -* Merge pull request #44 from bendichter/convert_to_nwb - -Convert to nwb ([`7a4fba9`](https://github.com/datajoint/element-array-ephys/commit/7a4fba9ba51d6ee1cf21bb7eaf87a59b4accfd44)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`9ee6088`](https://github.com/datajoint/element-array-ephys/commit/9ee60885e638e85746a093c791a848dbd37f2472)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`441cfe2`](https://github.com/datajoint/element-array-ephys/commit/441cfe2e2ab00765ad9603d28f0bd8a50d48d1d1)) - -* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb 
([`6fc51b0`](https://github.com/datajoint/element-array-ephys/commit/6fc51b055ef77069f04bc72cf7999d8e0c6717b0)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`9f9872c`](https://github.com/datajoint/element-array-ephys/commit/9f9872c37eb325b441703d4204889f6298d1ba4e)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`fb98327`](https://github.com/datajoint/element-array-ephys/commit/fb983274e8294fbee5703e6ccaaa3d46ad1394b4)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`d94453b`](https://github.com/datajoint/element-array-ephys/commit/d94453b1e7704ef41f830ae4c7e06f569a37f545)) - -* remove ephys_no_curation.py ([`3e07c61`](https://github.com/datajoint/element-array-ephys/commit/3e07c61b7556fffea3c7daa7409f70f51541e76e)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`59028cb`](https://github.com/datajoint/element-array-ephys/commit/59028cb22c1ee533d4635d677eea897a933cbf71)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`068ea3d`](https://github.com/datajoint/element-array-ephys/commit/068ea3d3682cc00d988807e804744102f8c0e359)) - -* trying clustering_query..proj() ([`c2004ea`](https://github.com/datajoint/element-array-ephys/commit/c2004eabf06822d563e06515e50476ac76ec3610)) - -* rmv units_query ([`0152c5d`](https://github.com/datajoint/element-array-ephys/commit/0152c5d97dd6b40119312f50da62f31e476dbad4)) - -* fix insertion record ([`82c8655`](https://github.com/datajoint/element-array-ephys/commit/82c86559e86e64580b740325741a5f62e6cf037f)) - -* add explanation for index parameter ([`707adff`](https://github.com/datajoint/element-array-ephys/commit/707adff4e6fbb451e0582115b85b72b36002ba9e)) - -* Merge remote-tracking branch 'origin/convert_to_nwb' into 
convert_to_nwb ([`bc54009`](https://github.com/datajoint/element-array-ephys/commit/bc54009c7520d29d13b2a5c5f3def3e8888502c6)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`842beec`](https://github.com/datajoint/element-array-ephys/commit/842beec80ed5d846aed29a48575db1de1457bdf9)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`766f4eb`](https://github.com/datajoint/element-array-ephys/commit/766f4eb0962cf36e10290cc1ff4dfb27ad74de87)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2526812`](https://github.com/datajoint/element-array-ephys/commit/2526812f664e7e58a9db368e293dc8e7927615d7)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`15044cf`](https://github.com/datajoint/element-array-ephys/commit/15044cf4c85ae67f9e2a73031980180380a0d974)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2e621c0`](https://github.com/datajoint/element-array-ephys/commit/2e621c02a4809e1c5a540ddc7e22c07f5fcdec1b)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`a01ee9c`](https://github.com/datajoint/element-array-ephys/commit/a01ee9ca8e277265382adb674b707ba67f173c01)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`cbefcde`](https://github.com/datajoint/element-array-ephys/commit/cbefcde9f81de6be64eeb31a04631fbb00fe6431)) - -* add explanation in docstring of add_ephys_units_to_nwb ([`3971fe6`](https://github.com/datajoint/element-array-ephys/commit/3971fe6632030d789ab6b159936d3f4dc2f5f878)) - -* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb 
([`a7b2abb`](https://github.com/datajoint/element-array-ephys/commit/a7b2abb99baf7295e4549ba5eb0edf7afb9acb63)) - -* Update element_array_ephys/export/nwb/nwb.py ([`c200699`](https://github.com/datajoint/element-array-ephys/commit/c200699f35d8f90be07f4a8fb194e33b504d9f78)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`2ee2bd5`](https://github.com/datajoint/element-array-ephys/commit/2ee2bd544ed777c537dc18b6421b4764462f3482)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`9e72773`](https://github.com/datajoint/element-array-ephys/commit/9e72773d701550b9c613dfb208f6543ce227e803)) - -* Merge branch 'main' into convert_to_nwb ([`b3779e5`](https://github.com/datajoint/element-array-ephys/commit/b3779e58a0b5f2be73abfc07c5425272089a9250)) - -* fix docstring for get_electrodes_mapping ([`acdb5f9`](https://github.com/datajoint/element-array-ephys/commit/acdb5f9d25c4f1c1f6e89dd37fd8e1697327a8e9)) - -* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb ([`826335b`](https://github.com/datajoint/element-array-ephys/commit/826335be00a09481b17b06ce6901c152490f301e)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`49fac08`](https://github.com/datajoint/element-array-ephys/commit/49fac083d5de5c853acb8833460edaa07132638e)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`ba6cbcf`](https://github.com/datajoint/element-array-ephys/commit/ba6cbcf1c3f03cd6a68b96a5b10e28398d9d60e9)) - -* specify releases for dependencies ([`daccfc4`](https://github.com/datajoint/element-array-ephys/commit/daccfc4dd7a48142739dfee5b00a3ee7c9624d19)) - -* add docstring for gain_helper ([`12974ff`](https://github.com/datajoint/element-array-ephys/commit/12974ff0df04fb8dc15b650e5cbbd3b51aa6340f)) - -* Update 
element_array_ephys/export/nwb/README.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`00c3691`](https://github.com/datajoint/element-array-ephys/commit/00c369144a50cc34acb0fdab12ea0ac93b6627a4)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`abccafa`](https://github.com/datajoint/element-array-ephys/commit/abccafa522f930f099eefd49c14e69eb95c56067)) - -* Update element_array_ephys/export/nwb/README.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`24ac2c0`](https://github.com/datajoint/element-array-ephys/commit/24ac2c038b3e7403a3130e43a385c1bab0acb8f5)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`a1fb193`](https://github.com/datajoint/element-array-ephys/commit/a1fb1934cb34b1ebedb11a10f514cb1ce24b9e00)) - -* Update element_array_ephys/export/nwb/README.md - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`584c738`](https://github.com/datajoint/element-array-ephys/commit/584c738982b53d5988e338302e72f7967b43fe2d)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`c68f2ca`](https://github.com/datajoint/element-array-ephys/commit/c68f2cabfe9d362d4a176a80eb8b87f068f8e317)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`3ae6e2d`](https://github.com/datajoint/element-array-ephys/commit/3ae6e2d0f96ac74d77cf5617719abc638c1fba66)) - -* Update element_array_ephys/ephys_acute.py - -Co-authored-by: Chris Brozdowski <CBrozdowski@yahoo.com> ([`e973743`](https://github.com/datajoint/element-array-ephys/commit/e9737435f55f8b3ef48c10ee691c0bfd37dd7e21)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> ([`b373b26`](https://github.com/datajoint/element-array-ephys/commit/b373b264080e36e379e775499a1b3166c004b546)) - -* fix 
imports ([`cc14671`](https://github.com/datajoint/element-array-ephys/commit/cc146716036cf8dba74a241160f0b67ae2c552bd)) - -* rmv tests (they are moved to the ephys workflow) ([`c63c8c4`](https://github.com/datajoint/element-array-ephys/commit/c63c8c42fc640319d4cfc4acf90e1b1595e63e3b)) - -* Merge remote-tracking branch 'cbroz1/ben' into convert_to_nwb - -# Conflicts: -# element_array_ephys/export/nwb/nwb.py ([`5b942b3`](https://github.com/datajoint/element-array-ephys/commit/5b942b33d1e4611367f962c70ec571b3d99f2143)) - -* adjust imports in __init__ and nwb.py ([`782a4a5`](https://github.com/datajoint/element-array-ephys/commit/782a4a5ee0cb7c6e1e382c2985dce45c418c7c19)) - -* Merge pull request #2 from ttngu207/chris-nwb - -import the correct ephys module that has been activated ([`2beb41b`](https://github.com/datajoint/element-array-ephys/commit/2beb41b9af183468846a0c03b7de8ab740f855c4)) - -* import the correct ephys module that has been activated ([`b4ffe1d`](https://github.com/datajoint/element-array-ephys/commit/b4ffe1d68fe068a5bd233ad8c9adb3bda09cf145)) - -* Merge pull request #1 from ttngu207/chris-nwb - -nwb function specification in linking_module ([`6d7ad7c`](https://github.com/datajoint/element-array-ephys/commit/6d7ad7c131037b882c1cf7a1183b5438e4635dbd)) - -* nwb function specification in linking_module ([`bf5e82a`](https://github.com/datajoint/element-array-ephys/commit/bf5e82aa63d771151ddd7348d5d37832cad8ac9d)) - -* Avoid linking_module issues. See details. - -- add __init__ + schema from git/ttngu207/element_array_ephys@no-curation -- add ephys_no_curation schema to match the schema ben has been pulling from - - how should we address this in the element? 
given not currently default -- remove unused imports -- import datajoint and element_interface.utils find_full_path -- add arguments to main export function: - - schema names as datajoint config database prefix default - - ephys_root_data_dir - default to dj.config or none -- add create_virtual_module statements to avoid activate(schema,linking_module=unknown) -- declare ephys and probe as global -- add assert errors for ephys_root_data_dir!=None when needed -- pass ephys_root_data_dir to relevant functions -- above permits: from element_array_ephys.export.nwb.nwb import ecephys_session_to_nwb ([`2595dae`](https://github.com/datajoint/element-array-ephys/commit/2595daee613f1b080b6bc4a6743865e8f9b42dc7)) - -* Rebase, squashed. See Details - -Add element_data_loader for multiple root dirs -Update author -Fix import -Fix OpenEphys session path -Update directory path -Add print statement -Fix for missing `fileTimeSecs` -Update error message -Suggested adds re upstream components -Directing to workflow for upstream `SkullReference` and utility functions ([`a557b17`](https://github.com/datajoint/element-array-ephys/commit/a557b17caaa0d779f91bee105a17fb1418121e00)) - -* rmv subject_id ([`806684f`](https://github.com/datajoint/element-array-ephys/commit/806684f4534c11982e78d50264f47257c3c3018f)) - -* import GenericDataChunkIterator from hdmf ([`a7f4624`](https://github.com/datajoint/element-array-ephys/commit/a7f46242d849a65149cb7207a87ec489402f7452)) - -* add tests for getting lfp data from datajoint ([`fafdde1`](https://github.com/datajoint/element-array-ephys/commit/fafdde1257aafd00b3c47739be8a73a4c7f09087)) - -* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb ([`63b545d`](https://github.com/datajoint/element-array-ephys/commit/63b545dec4d788132bf28832d1156488d478e47e)) - -* Update element_array_ephys/export/nwb/nwb.py - -Co-authored-by: Kabilar Gunalan <kabilar@datajoint.com> 
([`9374d94`](https://github.com/datajoint/element-array-ephys/commit/9374d94d1372f6f964ecb3bd628c5f97c18c261b)) - -* Merge branch 'main' into convert_to_nwb ([`f8027cc`](https://github.com/datajoint/element-array-ephys/commit/f8027ccedbae3418f3cd8e54d6a0197aeea41c65)) - -* update import path ([`6338daf`](https://github.com/datajoint/element-array-ephys/commit/6338dafd0a00c2eb862726977cf6c2470a8c7b1a)) - -* add tests ([`4cab8c8`](https://github.com/datajoint/element-array-ephys/commit/4cab8c86620094e912e7193c3a579989046d26b1)) - -* refactor into gains_helper ([`7953662`](https://github.com/datajoint/element-array-ephys/commit/79536621e8b6b9f35ed4cb61e5384c433da28bc0)) - -* correctly set conversion and channel_conversion ([`7deb00f`](https://github.com/datajoint/element-array-ephys/commit/7deb00f8e360c5b23e8b63333c55b50ece5334c3)) - -* ephys.find_full_path ([`654d567`](https://github.com/datajoint/element-array-ephys/commit/654d567f8701940bdec6c426d65cb619c3fa2016)) - -* import os ([`ba3f86a`](https://github.com/datajoint/element-array-ephys/commit/ba3f86a2a956e26beb7223cbf09dc779de894a8b)) - -* standardize slashes ([`8d3df71`](https://github.com/datajoint/element-array-ephys/commit/8d3df711fc6f79282e81ac2794c2ca0b5d19c10c)) - -* ephys.get_ephys_root_data_dir ([`e992478`](https://github.com/datajoint/element-array-ephys/commit/e99247874924a39596510fb144f3272f00886804)) - -* import probe ([`379ae11`](https://github.com/datajoint/element-array-ephys/commit/379ae11c718242d04cc6c2eb62a76a433d2b94cf)) - -* import session_to_nwb ([`1dccb68`](https://github.com/datajoint/element-array-ephys/commit/1dccb6893fa8a77408e598f278995998b5c8b45c)) - -* import from workflow pipeline ([`5de22e0`](https://github.com/datajoint/element-array-ephys/commit/5de22e06fa2db7b6e2005bbc094b05fc3581e850)) - -* import from workflow pipeline ([`1b67629`](https://github.com/datajoint/element-array-ephys/commit/1b676293227e03e77293c223f8d3f336c832ee59)) - -* fix nwbfile_kwargs logic 
([`eb47ee5`](https://github.com/datajoint/element-array-ephys/commit/eb47ee506db1fd1929da2d557c17776c28675326)) - -* fix nwbfile_kwargs logic ([`96c57f7`](https://github.com/datajoint/element-array-ephys/commit/96c57f756243eb00ced7e157e417ab1af5881c10)) - -* add optional session keys to ecephys_session_to_nwb ([`365b43b`](https://github.com/datajoint/element-array-ephys/commit/365b43b0fa584a531b386afe74eb55e853773a2d)) - -* add datetime import ([`d1f3dab`](https://github.com/datajoint/element-array-ephys/commit/d1f3daba676385a54aa108bb554bce008594b96d)) - -* relative import of ephys ([`1363214`](https://github.com/datajoint/element-array-ephys/commit/13632147a56826fe7ccba065aa02e370950a72c0)) - -* refactor to include requirements for nwb conversion ([`013ae7d`](https://github.com/datajoint/element-array-ephys/commit/013ae7da1bc94ed2c6c5e87b79378e121826882b)) - -* add readme for exporting to NWB ([`19f78b7`](https://github.com/datajoint/element-array-ephys/commit/19f78b78ef4467f1edd7b3270a8df51cdf349a91)) - -* add some docstrings ([`b35ee72`](https://github.com/datajoint/element-array-ephys/commit/b35ee723dc39777dfede2334452021a5c234f6d8)) - -* add missing nwbfile arg ([`a7f846b`](https://github.com/datajoint/element-array-ephys/commit/a7f846be82b020c5b6ace34b524dd99c5438d345)) - -* Merge remote-tracking branch 'origin/convert_to_nwb' into convert_to_nwb - -# Conflicts: -# element_array_ephys/export/nwb.py ([`b4d9c0e`](https://github.com/datajoint/element-array-ephys/commit/b4d9c0edd791f4f846b99c6bcc98ee688bbb508c)) - -* Update element_array_ephys/export/nwb.py - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`e837452`](https://github.com/datajoint/element-array-ephys/commit/e83745247b09b369f6992590446c7167c32bddb7)) - -* Update element_array_ephys/export/nwb.py - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`3298341`](https://github.com/datajoint/element-array-ephys/commit/3298341553063a8e2d165c25cfa18c0050047cde)) - -* Update 
element_array_ephys/export/nwb.py - -Co-authored-by: Dimitri Yatsenko <dimitri@datajoint.com> ([`470b20a`](https://github.com/datajoint/element-array-ephys/commit/470b20aa3914fb8c0bed943c7d2f82c0df5dd4fd)) - -* * optimize imports -* black -* upgrade to latest version of conversion-tools -* upgrade to latest spikeinterface api -* ([`d924d57`](https://github.com/datajoint/element-array-ephys/commit/d924d57ea62ec24c791135a16f3ed397493084d5)) - -* add documentation ([`d2b93f2`](https://github.com/datajoint/element-array-ephys/commit/d2b93f2054c51848a20d1a8da44e3cc7a9586973)) - -* * add lfp from source -* docstrings -* json dump insertion into location -* ignore channel_conversion if all are 1 -* black formatting ([`8d75a61`](https://github.com/datajoint/element-array-ephys/commit/8d75a61ac29b0d51381104472a8570db6c99bcb5)) - -* add draft convert to nwb ([`f3dd8d8`](https://github.com/datajoint/element-array-ephys/commit/f3dd8d80c0a0b9a4e16734a35cd0e2f4520a0142)) - -* Mult rootdirs. If sess_dir, check fullpath. Give OpenEphys fullpath. 
([`720eb00`](https://github.com/datajoint/element-array-ephys/commit/720eb00e60c6df928f4e6cbe938d4db625feab58)) - -* Add Part table to track order of operations ([`3d8ec16`](https://github.com/datajoint/element-array-ephys/commit/3d8ec16a85507d4708ceedfdcffd4609546dc0f7)) - -* Merge branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`fee5b7c`](https://github.com/datajoint/element-array-ephys/commit/fee5b7ca39b564985ad5907dcbdfd58f719beeec)) - -* Merge pull request #1 from kabilar/precluster - -Merge to main ([`1e92b04`](https://github.com/datajoint/element-array-ephys/commit/1e92b048c2da3dd50916df1825bf8e477de9bb05)) - -* Add pre-clustering tables to acute module ([`c7a155a`](https://github.com/datajoint/element-array-ephys/commit/c7a155a9d4ab384d667046fefb316df7d00ed656)) - -* Create copy of `ephys_acute.py` ([`122d9a2`](https://github.com/datajoint/element-array-ephys/commit/122d9a2a598263340d1dcdc49353ac3916ffc3b7)) - -* Merge pull request #64 from kabilar/main - -Update README ([`14517a2`](https://github.com/datajoint/element-array-ephys/commit/14517a27288cf2a2ddfd526da64a2d32da69b156)) - -* Replace italics with back tick ([`b3f5b29`](https://github.com/datajoint/element-array-ephys/commit/b3f5b295ed233aedad5db1f872be18c2df644951)) - -* Replace italics with back tick ([`e8350d5`](https://github.com/datajoint/element-array-ephys/commit/e8350d5bd5e02a7150c7c5c2589daef8aaaa54a8)) - -* Add ephys schema text ([`112f325`](https://github.com/datajoint/element-array-ephys/commit/112f325d2bdc2e940756531282ac38f5e67bc67d)) - -* Add activation text ([`edc9d5d`](https://github.com/datajoint/element-array-ephys/commit/edc9d5dd3c972ea83042b3f844d5011d2602d1c3)) - -* Add ephys schema text ([`ab92d84`](https://github.com/datajoint/element-array-ephys/commit/ab92d847e2ec39d7f0d6dc851ac6ef26bfaf7bcb)) - -* Revert "Create copy of `ephys_acute.py`" - -This reverts commit b66109b5e61297a10c1cc8a929115fa5955238e1. 
([`000308f`](https://github.com/datajoint/element-array-ephys/commit/000308f7261794e6acd39762e524c27331bb1a0a)) - -* Add probe schema text ([`4f8699f`](https://github.com/datajoint/element-array-ephys/commit/4f8699fa5859e4a873c5dc32fbf4fae64e5073af)) - -* Create copy of `ephys_acute.py` ([`c7393fc`](https://github.com/datajoint/element-array-ephys/commit/c7393fcb4933846b6862548a47c29cadc4d97801)) - -* Revert "Create copy of `ephys_acute.py`" - -This reverts commit b66109b5e61297a10c1cc8a929115fa5955238e1. ([`9ddfb4c`](https://github.com/datajoint/element-array-ephys/commit/9ddfb4c927cca7a8de40965752d7eac2e06bd07d)) - -* Update format ([`f940a71`](https://github.com/datajoint/element-array-ephys/commit/f940a719d9c409eb40b760f8513ba9737b5cf809)) - -* Add collapsible sections ([`338a796`](https://github.com/datajoint/element-array-ephys/commit/338a796ff2ca3cddf460309c6776034f6409aeed)) - -* Update format ([`9b68b03`](https://github.com/datajoint/element-array-ephys/commit/9b68b0332d1cd228327fea70459b1383b77b4473)) - -* Add collapsible section ([`8740b2c`](https://github.com/datajoint/element-array-ephys/commit/8740b2ca2039a1a4d4fd7ab9a2effcabfbc9d7d6)) - -* Add citation section ([`c8ac8e6`](https://github.com/datajoint/element-array-ephys/commit/c8ac8e656ddd374d3855fc22241c304624d925dd)) - -* Add links to elements.datajoint.org ([`0bc69c2`](https://github.com/datajoint/element-array-ephys/commit/0bc69c217cc679a917de97587f5d2bd205e7a41a)) - -* Add link to elements.datajoint.org ([`d11af2f`](https://github.com/datajoint/element-array-ephys/commit/d11af2f30659b983b936b922264e6d583ce4bc86)) - -* Move background file to datajoint-elements repo ([`68c94c1`](https://github.com/datajoint/element-array-ephys/commit/68c94c19884dcc41f998852537e97d8b231a7ccf)) - -* Create copy of `ephys_acute.py` ([`b66109b`](https://github.com/datajoint/element-array-ephys/commit/b66109b5e61297a10c1cc8a929115fa5955238e1)) - -* Move background file to datajoint-elements repo 
([`f8a3abf`](https://github.com/datajoint/element-array-ephys/commit/f8a3abfa0af6abffec43654b0c75ae32c45c71c3)) - -* Merge pull request #58 from kabilar/main - -Add attributes and rename module ([`1f7a2a3`](https://github.com/datajoint/element-array-ephys/commit/1f7a2a36fe0162c0ba6241f18ff34338b323e854)) - -* Ensure backwards compatibility ([`fc38bb5`](https://github.com/datajoint/element-array-ephys/commit/fc38bb588f03fe6971bd269a19498b50b7c2d6c7)) - -* Update string formatting ([`0d56f2e`](https://github.com/datajoint/element-array-ephys/commit/0d56f2e62be96fd0a94b6a43142c0cb620b16fb7)) - -* Rename module `ephys` to `ephys_acute` ([`1104ab4`](https://github.com/datajoint/element-array-ephys/commit/1104ab4b8820c883857e7412374c20bbc2b27689)) - -* Add recording metadata ([`65b9ece`](https://github.com/datajoint/element-array-ephys/commit/65b9ece60e0ec988ed201b9d4d036e1c1535fb7a)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`b13995e`](https://github.com/datajoint/element-array-ephys/commit/b13995e230732f8e6a3b957b1388d0d82e79a274)) - -* Merge pull request #35 from kabilar/main - -Implement `find_full_path` within `ephys` modules ([`088093d`](https://github.com/datajoint/element-array-ephys/commit/088093d068c444311940a195f4b7408bbf0db429)) - -* Increase datatype size ([`6b17239`](https://github.com/datajoint/element-array-ephys/commit/6b1723940edf595329235c4805bfcc5b1b6544a8)) - -* Rename package ([`6f9507c`](https://github.com/datajoint/element-array-ephys/commit/6f9507c3d752f5bd010e1cf24a7369060d77b8d7)) - -* Merge branch 'main' of https://github.com/kabilar/element-array-ephys into main ([`ce6adf1`](https://github.com/datajoint/element-array-ephys/commit/ce6adf1c6a7409ecd98907f3a3a5c6d50f0c10bb)) - -* Suggested adds re upstream components - -Directing to workflow for upstream `SkullReference` and utility functions 
([`4ca9b32`](https://github.com/datajoint/element-array-ephys/commit/4ca9b328f705d9363dd70a88ad857c5994f65d77)) - -* Update error message ([`09e8a96`](https://github.com/datajoint/element-array-ephys/commit/09e8a96504898f7840b031d09ce9346b639b600f)) - -* Remove print statement ([`1a4a7f5`](https://github.com/datajoint/element-array-ephys/commit/1a4a7f5c0834f62a64a6508de0d0b5de148a4657)) - -* [WIP] Add print statement ([`84bb616`](https://github.com/datajoint/element-array-ephys/commit/84bb6169c98fedfea50418a10c31e870b1e8913f)) - -* Fix for missing `fileTimeSecs` ([`665cc28`](https://github.com/datajoint/element-array-ephys/commit/665cc287b5b84cfe961bca3e47c9ff407483a2b9)) - -* Update module import ([`818cc53`](https://github.com/datajoint/element-array-ephys/commit/818cc53edb5395a1cc845958a373365679174f22)) - -* Fixed doc string ([`9881350`](https://github.com/datajoint/element-array-ephys/commit/98813508f9a77ee3110d8df055957308361273d5)) - -* Update module import ([`139e99b`](https://github.com/datajoint/element-array-ephys/commit/139e99b4d8dfec9c267dd8718b533cdb5a59bc00)) - -* Fix module import ([`44be355`](https://github.com/datajoint/element-array-ephys/commit/44be35568edfab666d48dcaa30a02e72ea65159f)) - -* Remove test print statement ([`cf533a2`](https://github.com/datajoint/element-array-ephys/commit/cf533a275ca1220136472bbdf30048dc9f8c92e9)) - -* [WIP] Add print statement ([`b98192b`](https://github.com/datajoint/element-array-ephys/commit/b98192b8ca2cde9e2babbc48b383673a5ae15a94)) - -* [WIP] Update directory path ([`49c554b`](https://github.com/datajoint/element-array-ephys/commit/49c554bea6a2431140b553098f889f717600da3a)) - -* Update comments ([`ab426c1`](https://github.com/datajoint/element-array-ephys/commit/ab426c1ed9ea14b960bcd3d3e1970c74ef020143)) - -* Fix OpenEphys session path ([`2233c5d`](https://github.com/datajoint/element-array-ephys/commit/2233c5ddff6351b541125b53cb6c49a424cacc72)) - -* [WIP] Print directory path 
([`68ef14b`](https://github.com/datajoint/element-array-ephys/commit/68ef14b180c7fd5d61bbac1a9d6ec9d4a7c0530e)) - -* Fix import ([`2be1f08`](https://github.com/datajoint/element-array-ephys/commit/2be1f08af1d428570f5155f7d11463646805886b)) - -* Update author ([`b6b39c0`](https://github.com/datajoint/element-array-ephys/commit/b6b39c093a7603eba1a40b9b3b82db1c6294aac9)) - -* Add element_data_loader for multiple root dirs ([`ffaf60b`](https://github.com/datajoint/element-array-ephys/commit/ffaf60b72b648229b47e76ff9bb75ddedd56ef13)) - -* Move functions to `element-data-loader` ([`4f4be8d`](https://github.com/datajoint/element-array-ephys/commit/4f4be8d264398c3251baae5edc9be37a97c3f753)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`a0f49d2`](https://github.com/datajoint/element-array-ephys/commit/a0f49d27cacaceaf54a688abb4e9de2a153574a8)) - -* Merge pull request #30 from ttngu207/chronic_and_acute - -chronic and acute probe insertions as different python modules ([`1fdbcf1`](https://github.com/datajoint/element-array-ephys/commit/1fdbcf12d1a518e686b6b79e9fbe77b736cb606a)) - -* rename to `ephys_chronic` ([`7474f8f`](https://github.com/datajoint/element-array-ephys/commit/7474f8f2358b784d133277ecd1da8e687ab5fa14)) - -* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into chronic_and_acute ([`f28f0c6`](https://github.com/datajoint/element-array-ephys/commit/f28f0c6f489566e3c0be9cb8235ba7ea80d716f2)) - -* Update Dockerfile ([`d126bc5`](https://github.com/datajoint/element-array-ephys/commit/d126bc53476c5687dfcaf63689d74c18b707cf3e)) - -* chronic and acute probe insertions as different python modules ([`c4a9ab8`](https://github.com/datajoint/element-array-ephys/commit/c4a9ab8214c23c3c61f5f41ffef1c529e2c82b59)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`6472c19`](https://github.com/datajoint/element-array-ephys/commit/6472c19b6b21ffe091c4098ec848802453a63c3d)) - 
-* Merge pull request #27 from ttngu207/main - -beta 0 release ([`7c67f65`](https://github.com/datajoint/element-array-ephys/commit/7c67f65bcc8844eed429ab5b5a10f457162f9f15)) - -* beta 0 release ([`69a5424`](https://github.com/datajoint/element-array-ephys/commit/69a5424f0404eae379978a7f990b677d62aa42b9)) - -* Merge pull request #26 from ttngu207/main - -bump alpha version for first release on PyPI ([`7cd92ee`](https://github.com/datajoint/element-array-ephys/commit/7cd92ee693779a03abe73f91fca267a82435e59a)) - -* bump alpha version for first release on PyPI ([`99ab2fa`](https://github.com/datajoint/element-array-ephys/commit/99ab2fa6033e14c2ea98a4fced629287042766a6)) - -* Merge pull request #25 from ttngu207/main - -bump alpha version for first release on pypi ([`159a2a5`](https://github.com/datajoint/element-array-ephys/commit/159a2a5d4befc1748ab11f5443239a09df759ea6)) - -* bump alpha version for first release on pypi ([`ab3cfc9`](https://github.com/datajoint/element-array-ephys/commit/ab3cfc922bb76e0d6e3a0930ba3f995a47891802)) - -* Merge pull request #24 from ttngu207/main - -update README, improve markdown formatting, specify `long_description_content_type` to markdown, add versioning and GH Action for PyPI release ([`07f858c`](https://github.com/datajoint/element-array-ephys/commit/07f858c36437a7a79d8a5bddb49d026c03f274ad)) - -* Apply suggestions from code review - -Co-authored-by: Dimitri Yatsenko <dimitri@vathes.com> ([`98753ed`](https://github.com/datajoint/element-array-ephys/commit/98753ed3c3a6cff88f445b633d47b8e27fb4f7df)) - -* minor code cleanup ([`d68d53e`](https://github.com/datajoint/element-array-ephys/commit/d68d53e28eebf51a6f44c53a591b01ee0a894e54)) - -* versioning and GH Action for PyPI release ([`0ffc885`](https://github.com/datajoint/element-array-ephys/commit/0ffc88595b1365992e94f6b53fe9bb7b0d4a75c4)) - -* update diagram ([`9ece8cd`](https://github.com/datajoint/element-array-ephys/commit/9ece8cdc32b6fbe164874bf846f8c0eb26c2d8b7)) - -* update 
README, improve markdown formatting, specify `long_description_content_type` to markdown ([`3c9662b`](https://github.com/datajoint/element-array-ephys/commit/3c9662bd166d052c43c6ed5fd080bfd1c4b764ec)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`1ce53f3`](https://github.com/datajoint/element-array-ephys/commit/1ce53f37081b14744e625b8896df963af24cea2a)) - -* Merge pull request #23 from ttngu207/main - -added comments to tables ([`f2ac602`](https://github.com/datajoint/element-array-ephys/commit/f2ac602f71d9e105c584b91aa3b04a9cda6f931e)) - -* added comments to tables ([`f05e1fe`](https://github.com/datajoint/element-array-ephys/commit/f05e1fe5b1b7c992f35df9070049626a48cbcddc)) - -* Merge pull request #22 from ttngu207/main - -bump version - 0.1.0a3 ([`6fcc31a`](https://github.com/datajoint/element-array-ephys/commit/6fcc31ac94afdf1602c9ce5190f682cded37a19b)) - -* Update CHANGELOG.md - -Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`8d8683a`](https://github.com/datajoint/element-array-ephys/commit/8d8683aa6c03b7c834e368820dc13222703d177f)) - -* bump version - 0.1.0a3 ([`f500492`](https://github.com/datajoint/element-array-ephys/commit/f50049292446b8479f4c0d3df83cee03892c85cb)) - -* Merge pull request #21 from ttngu207/main - -GitHub Action for release process ([`283fad0`](https://github.com/datajoint/element-array-ephys/commit/283fad06c73f7b98e4f7f0d469005ded1149ad99)) - -* minor cleanup ([`6120883`](https://github.com/datajoint/element-array-ephys/commit/6120883e50b73ccabd7589a42971c47050c1b002)) - -* Apply suggestions from code review - -Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`ef3578e`](https://github.com/datajoint/element-array-ephys/commit/ef3578e3a6c1eaf3e082c0558c368b49cceb6a24)) - -* re-work `pkg_name` and use README as `long_description` 
([`1cbc62a`](https://github.com/datajoint/element-array-ephys/commit/1cbc62aaf9ba42534666e4790debdd3eba5a88d4)) - -* add docker-compose to gitignore ([`fc8f72b`](https://github.com/datajoint/element-array-ephys/commit/fc8f72b3739adbe95a5dcd822bacaef1327aa95c)) - -* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into main ([`5e32e91`](https://github.com/datajoint/element-array-ephys/commit/5e32e91b5af2ff64cb119ed730508c0ac67a2f51)) - -* Apply suggestions from code review - -Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`7cf70d1`](https://github.com/datajoint/element-array-ephys/commit/7cf70d10e7b064cd537121e8c23480939cfeed95)) - -* for testing - update twine upload to testpypi ([`ecc0ab2`](https://github.com/datajoint/element-array-ephys/commit/ecc0ab2aefbea413361371059d9fd22d190b2306)) - -* address review comments, add test-changelog ([`ef7b6c9`](https://github.com/datajoint/element-array-ephys/commit/ef7b6c91c417c5b2cccde7bbc4a08b8f0c5ec02e)) - -* Apply suggestions from code review - -Co-authored-by: Raphael Guzman <38401847+guzman-raphael@users.noreply.github.com> ([`17dc100`](https://github.com/datajoint/element-array-ephys/commit/17dc100159947670b024f83c5e28e35567d444b3)) - -* Update CHANGELOG.md ([`e04f739`](https://github.com/datajoint/element-array-ephys/commit/e04f739df575ddab534e9e3b8aa26c3b2ba41cc1)) - -* version 0.1.0a3 ([`f433189`](https://github.com/datajoint/element-array-ephys/commit/f4331894dc804a62f660303038a831fb273a86e7)) - -* update setup, point back to `datajoint` github ([`c7a1940`](https://github.com/datajoint/element-array-ephys/commit/c7a194023a77cb89686bd2e3685494180eca6099)) - -* GH Action bugfix - bump version ([`f2c9726`](https://github.com/datajoint/element-array-ephys/commit/f2c972601a8b826d505849d74d9f7b6b7d13dcc8)) - -* bugfix, add SDIST_PKG_NAME ([`e8632a3`](https://github.com/datajoint/element-array-ephys/commit/e8632a3b267f8e40717c0ee457e10e35403e5777)) - -* improve 
package_name parsing ([`be26e4b`](https://github.com/datajoint/element-array-ephys/commit/be26e4b24718830b66a3fc2774b22cf1e448f2b3)) - -* Update development.yaml ([`ff5f5f9`](https://github.com/datajoint/element-array-ephys/commit/ff5f5f900a6beddd835c7d6af366b563cd8f31f8)) - -* Update development.yaml ([`f847aeb`](https://github.com/datajoint/element-array-ephys/commit/f847aebcdb1b099dd987f292d34891c065d71ffb)) - -* add `build` to GH action ([`5052b8e`](https://github.com/datajoint/element-array-ephys/commit/5052b8e7e494f2397bbc0a108ef5e9825c37206f)) - -* change package url - for testing GH release only ([`77f5240`](https://github.com/datajoint/element-array-ephys/commit/77f524093c69ce371627f12a386efa78332b779f)) - -* update changelog - bump version to 0.1.0a3 ([`bae08ad`](https://github.com/datajoint/element-array-ephys/commit/bae08ad5b7995b1246f0ead8f456ebe164f68053)) - -* Update development.yaml ([`d124407`](https://github.com/datajoint/element-array-ephys/commit/d1244077f55ac65ddddd5335a7684eb13280e37c)) - -* Update .gitignore ([`b8eb640`](https://github.com/datajoint/element-array-ephys/commit/b8eb64025e65bd49081d9d7c9e93a7ad3e8dc7fd)) - -* set up release processes for GH Action ([`a94e726`](https://github.com/datajoint/element-array-ephys/commit/a94e7268ca99d33dee0c15d1aac1f47747ca7bfd)) - -* Merge branch 'main' into GH-action-PyPI-release ([`b991fbb`](https://github.com/datajoint/element-array-ephys/commit/b991fbb776b71a5feff09b638f171488b78bd3ee)) - -* add `package_version` ([`02f5387`](https://github.com/datajoint/element-array-ephys/commit/02f5387a7bd4bce569193fecf09198456ea3b7b1)) - -* Create CHANGELOG.md ([`77a7a52`](https://github.com/datajoint/element-array-ephys/commit/77a7a5293f3d97a091a3ba159a87fe08967f9125)) - -* Merge pull request #20 from ttngu207/main - -table renames, code cleanup ([`4cced0e`](https://github.com/datajoint/element-array-ephys/commit/4cced0edd25ef2186dd9498c67632363cf37eab2)) - -* table renames, code cleanup 
([`236301a`](https://github.com/datajoint/element-array-ephys/commit/236301ab821107e37d26d59ee00e643c10e7f8d6)) - -* Merge pull request #19 from ttngu207/main - -Code cleanup/optimization, variables renaming for clarity ([`b0fa79f`](https://github.com/datajoint/element-array-ephys/commit/b0fa79ff1895e963f9f4ba6b11160ca4df2d087b)) - -* minor bugfix ([`9b18415`](https://github.com/datajoint/element-array-ephys/commit/9b184159115cd48a60c32f6d406675caeed8147a)) - -* split `find_valid_full_path` to `find_full_path` and `find_root_directory` ([`258839b`](https://github.com/datajoint/element-array-ephys/commit/258839b3a97c03cccbf36deeaa7637724af98bb5)) - -* support `.xlsx` cluster files ([`4e824cf`](https://github.com/datajoint/element-array-ephys/commit/4e824cfbef5cfee6555c77e0341ef069bd174703)) - -* minor wording fix ([`855f8eb`](https://github.com/datajoint/element-array-ephys/commit/855f8eb60c8f9ac3411badaff7fa20ea7d908caa)) - -* remove `get_clustering_root_data_dir()` from docstring ([`6f01562`](https://github.com/datajoint/element-array-ephys/commit/6f01562f8c2b60a497be89474958956e962171c8)) - -* allow root_dir to be a list of potential directories - util function `find_valid_full_path()` for root and path searching ([`6488fee`](https://github.com/datajoint/element-array-ephys/commit/6488fee6499a995756a303416740809b1b5886a7)) - -* code refactor - improve logic for `spikeglx_meta` file search ([`e51113b`](https://github.com/datajoint/element-array-ephys/commit/e51113b94bca3267f2870936c34d1140ceb037f9)) - -* Update version.py ([`91a3824`](https://github.com/datajoint/element-array-ephys/commit/91a382435fc5af8021718e54d57c908a1dc30418)) - -* bugfix ([`669c6e5`](https://github.com/datajoint/element-array-ephys/commit/669c6e53e0882b062cc23969a855205e906c2af0)) - -* improve variables naming in kilosort reader ([`e761501`](https://github.com/datajoint/element-array-ephys/commit/e7615017d168e1360fe0ab7e40c0958d9a9c97e4)) - -* improve variables naming 
([`c002646`](https://github.com/datajoint/element-array-ephys/commit/c0026467259cfff561b41fabf7ce5d08e4352911)) - -* improve naming, comments ([`cd28d9b`](https://github.com/datajoint/element-array-ephys/commit/cd28d9b43e319777a0e97f6e79d403623902cb06)) - -* code-cleanup - variables renaming - addressing PR review's comments ([`eb7eb2c`](https://github.com/datajoint/element-array-ephys/commit/eb7eb2c4336fa7e4ed8d109e24e2eba02341b8f0)) - -* Merge pull request #17 from ttngu207/main - -specify a separate `get_clustering_root_data_dir()` - handle cases where raw ephys and clustering results are stored at different root locations (e.g. different mount points) ([`74a7a56`](https://github.com/datajoint/element-array-ephys/commit/74a7a5669f0aad4be3b430f93dd3efaad24af920)) - -* Merge branch 'main' of https://github.com/ttngu207/element-array-ephys into main ([`99d761f`](https://github.com/datajoint/element-array-ephys/commit/99d761fd17b4fb410f5729a380424424f1fe5d43)) - -* Apply suggestions from code review - improve docstring/comments - -Co-authored-by: shenshan <shenshanpku@gmail.com> ([`6f8cd8b`](https://github.com/datajoint/element-array-ephys/commit/6f8cd8b832af8861ac29f47ffae13036e1a90b36)) - -* remove Quality Control - will add this as a separate element ([`33a421b`](https://github.com/datajoint/element-array-ephys/commit/33a421b5298c784111a5e62613a1e8a018c48c1c)) - -* comment fix ([`af54831`](https://github.com/datajoint/element-array-ephys/commit/af54831d29596758c871d81128bc3a501cb25a98)) - -* naming bugfix ([`75d31a5`](https://github.com/datajoint/element-array-ephys/commit/75d31a5c36ecb575171ee77c7aecb31348533c08)) - -* rename "OpenEphys" to "Open Ephys" ([`bc2f528`](https://github.com/datajoint/element-array-ephys/commit/bc2f528b0bc8a94f21eb21f2c65d6fa8c5b5a409)) - -* added `QualityControl` as a master-table and make `ClusterQualityMetrics` the part-table now - no need for modified `key_source` 
([`2c9a787`](https://github.com/datajoint/element-array-ephys/commit/2c9a787950c7ae97f87c4a1dc998565bdb0a65ee)) - -* Waveform table now a master table, with Waveform.Unit as part-table (no need for modified `key_source`) ([`31e2320`](https://github.com/datajoint/element-array-ephys/commit/31e2320955525b919246bd5aae6f85ef25ec30b7)) - -* openephys loader - code cleanup ([`033240f`](https://github.com/datajoint/element-array-ephys/commit/033240f97a049f8b6c23d17df4548252d89ae70d)) - -* creating Neuropixels probe (in ProbeType) as part of `probe` module activation ([`655115b`](https://github.com/datajoint/element-array-ephys/commit/655115bcc7a4530fea488d9737c12be42da046ee)) - -* tweaks to LFP and waveform ingestion - do in small batches to mitigate memory issue ([`dba0a48`](https://github.com/datajoint/element-array-ephys/commit/dba0a48726553913a77b6a65b9540713da505b73)) - -* minor updates to "jsiegle" PR - code, variable-naming cleanup - -Handle cases where the plugin is `Neuropix-PXI` but `NP_PROBE` is not present in `processor['EDITOR']` (only `PROBE`) ([`dcf8906`](https://github.com/datajoint/element-array-ephys/commit/dcf89069aa289d300779cb34bb4c3940be535bef)) - -* improve docstring/description ([`bebec1a`](https://github.com/datajoint/element-array-ephys/commit/bebec1ac563fc623fcc92125943ec410905230b2)) - -* 'enable_python_native_blobs' = True ([`fcb5983`](https://github.com/datajoint/element-array-ephys/commit/fcb5983a77981182670d3294cac60d82bc9bc501)) - -* Merge branch 'main' of https://github.com/datajoint/element-array-ephys into main ([`2adf2e0`](https://github.com/datajoint/element-array-ephys/commit/2adf2e06af7a89120d8d07cfb33926f216420bf2)) - -* Merge pull request #16 from jsiegle/main - -Update Open Ephys data reader ([`cf39185`](https://github.com/datajoint/element-array-ephys/commit/cf391854d0510ec1d4a903c478f880250523780a)) - -* Update Open Ephys data reader 
([`a85e835`](https://github.com/datajoint/element-array-ephys/commit/a85e83532f017da405ac67fd7e5d135a52d07a9f)) - -* specify a separate `get_clustering_root_data_dir()` - handle cases where raw ephys and clustering results are stored a different root locations (e.g. different mount points) ([`ce90dc6`](https://github.com/datajoint/element-array-ephys/commit/ce90dc6b212d64ea158c6392390f813cfa7a4df7)) - -* add `version.py` ([`4185ba3`](https://github.com/datajoint/element-array-ephys/commit/4185ba3adb0ccfeac6e87b1220045ca5d8753fd7)) - -* Update .gitignore ([`f69e491`](https://github.com/datajoint/element-array-ephys/commit/f69e491c56e4e87b9a35c922a55a19658bd82030)) - -* Merge pull request #13 from ttngu207/main - -Mostly code cleanup - formatting ([`f07d131`](https://github.com/datajoint/element-array-ephys/commit/f07d13106edafdad87ae28f229907ff6847982e3)) - -* more code-cleanup ([`ea1547f`](https://github.com/datajoint/element-array-ephys/commit/ea1547fb4b31f748b9f5c2f92d622fcb752b1d73)) - -* Update setup.py ([`15ca803`](https://github.com/datajoint/element-array-ephys/commit/15ca803536aba0bb292d5ed3942ed85e1d4793e9)) - -* Update Background.md ([`0833d10`](https://github.com/datajoint/element-array-ephys/commit/0833d106cf4a6de9b0eb8acc9a5bf372fde1b979)) - -* cleanup ([`6c490f8`](https://github.com/datajoint/element-array-ephys/commit/6c490f8a96a09d0e681c63b36951d4def1fcbd7b)) - -* Update README.md ([`40ce9e6`](https://github.com/datajoint/element-array-ephys/commit/40ce9e68c5b88d842135390fa8378bb42d2a9947)) - -* rename `elements-ephys` -> `element-array-ephys` ([`fa369f0`](https://github.com/datajoint/element-array-ephys/commit/fa369f04c43e5f6e7cb68870bf58a1d8910888e0)) - -* Update README.md ([`a573e5c`](https://github.com/datajoint/element-array-ephys/commit/a573e5c257623bbdf93f29ec4d9a2184feab3162)) - -* Update Background.md ([`cf2f172`](https://github.com/datajoint/element-array-ephys/commit/cf2f172c2a6ee5729d913e4f882c7a7d3b30168d)) - -* Update Background.md 
([`dfff966`](https://github.com/datajoint/element-array-ephys/commit/dfff966bf190d4d9d41bd6150346b52d44edf30b)) - -* added Background section ([`653b84f`](https://github.com/datajoint/element-array-ephys/commit/653b84f73b8131733cb33546fd3234e85078b800)) - -* code cleanup - formatting ([`7ab0c2a`](https://github.com/datajoint/element-array-ephys/commit/7ab0c2a4c1cca04be1271b593d1e944a565a64b3)) - -* Create CONTRIBUTING.md ([`1ee37ab`](https://github.com/datajoint/element-array-ephys/commit/1ee37ab341bd0959aea572321168646e9cc97dbf)) - -* Merge pull request #10 from ttngu207/main - -Ephys pipeline with support for multiple curations ([`983d61a`](https://github.com/datajoint/element-array-ephys/commit/983d61a89ccc42f114a51915261b443e2c2b153e)) - -* update diagrams ([`e98b34f`](https://github.com/datajoint/element-array-ephys/commit/e98b34f52aaa0a042810685f8b896c2288774131)) - -* Update requirements.txt ([`bab8e1d`](https://github.com/datajoint/element-array-ephys/commit/bab8e1d5cfbd0930323b8716d4eb80550a106bda)) - -* bugfix in spikeglx get original channels ([`f8244c8`](https://github.com/datajoint/element-array-ephys/commit/f8244c89ab86d83abad5ef870639180d6a751c4d)) - -* Merge branch 'multiple-curations' into main ([`bfab1dd`](https://github.com/datajoint/element-array-ephys/commit/bfab1dde4dc7b3620bd4cd0950460da71ac18a2e)) - -* bugfix in Unit ingestion ([`adfd5af`](https://github.com/datajoint/element-array-ephys/commit/adfd5af9632f7987a427d7ff07d926e85f90bff3)) - -* added a `CuratedClustering` as master table for `Unit` ([`7bd751a`](https://github.com/datajoint/element-array-ephys/commit/7bd751a8bc2574b14180eb39016cdb620358c4a5)) - -* Update openephys.py ([`a889407`](https://github.com/datajoint/element-array-ephys/commit/a8894072c7d84e375ef9ca458d7556703916bfaf)) - -* minor code cleanup ([`b0011a1`](https://github.com/datajoint/element-array-ephys/commit/b0011a18ee199afb878bbf8c152d526331a2a820)) - -* `Curation` downstream from `Clustering` - move `Curation` 
insertion in `Clustering.make()` to a separate utility function ([`6859e52`](https://github.com/datajoint/element-array-ephys/commit/6859e52ba4832f7dd714c3890552b243ecffd6c7)) - -* Merge branch 'main' into multiple-curations ([`64bd47d`](https://github.com/datajoint/element-array-ephys/commit/64bd47d72aa3bcd0de31d22a46db5be821ce88f1)) - -* prototype design for multiple curations ([`94686f5`](https://github.com/datajoint/element-array-ephys/commit/94686f5d2237f16a7cb9885f0ffb6fc11db49785)) - -* Merge pull request #9 from ttngu207/main - -keep `_timeseries` data as memmap int16 type, apply bitvolt conversion at LFP/Waveform extraction step & Bugfix in channel matching for SpikeGLX ([`70a813b`](https://github.com/datajoint/element-array-ephys/commit/70a813b207bba72bb3a268a797ef156a53c15c7a)) - -* Update elements_ephys/readers/spikeglx.py - -Co-authored-by: Dimitri Yatsenko <dimitri@vathes.com> ([`93ea01a`](https://github.com/datajoint/element-array-ephys/commit/93ea01a9bad217fad18a77a99b2df46b0986828c)) - -* minor formatting, PEP8 ([`d656108`](https://github.com/datajoint/element-array-ephys/commit/d65610889bba20fe468c5a97663769c3a97cf418)) - -* datajoint version 0.13+ required ([`39580e1`](https://github.com/datajoint/element-array-ephys/commit/39580e14f2ffc0d3772c3267e5525e8f9216a5b4)) - -* bugfix - openephys waveform extraction ([`825407c`](https://github.com/datajoint/element-array-ephys/commit/825407c5f3fae0def1291dfaf6b87bdaf14ea5f4)) - -* bugfix ([`4afc0f1`](https://github.com/datajoint/element-array-ephys/commit/4afc0f11e164281b91357f3ac07b8fb3d17cbce8)) - -* try-catch for searching/loading spikeglx files ([`f3d98b3`](https://github.com/datajoint/element-array-ephys/commit/f3d98b3b14a903c962037ca5406c9a3302475de3)) - -* keep `_timeseries` data as memmap int16 type, apply bitvolt conversion only when needed (at LFP or waveform extraction) ([`f9e5fc2`](https://github.com/datajoint/element-array-ephys/commit/f9e5fc291c170fcae905c9432d5f50f439a5e891)) - -* Update 
requirements.txt ([`625c630`](https://github.com/datajoint/element-array-ephys/commit/625c6307d9f9dbb97b953131a166782b230b0f4c)) - -* Update attached_ephys_element.svg ([`1411687`](https://github.com/datajoint/element-array-ephys/commit/1411687ad687fb75e3cb72831bbc580696d9a5ae)) - -* added svg diagram ([`7a0762c`](https://github.com/datajoint/element-array-ephys/commit/7a0762c18c28acdd4009c012eabd8d102b816f76)) - -* Merge pull request #8 from ttngu207/main - -ClusteringTask as manual table - Ingestion support for OpenEphys ([`f76086c`](https://github.com/datajoint/element-array-ephys/commit/f76086c611428ed4d8cc52edee6b240fb805779a)) - -* bugfix: Imax per probe type ([`56f8fdc`](https://github.com/datajoint/element-array-ephys/commit/56f8fdc43db8b1975a4cca46c1702a8670a190c2)) - -* code cleanup - renamed `data` -> `timeseries` ([`6d5ee8b`](https://github.com/datajoint/element-array-ephys/commit/6d5ee8bf68bfc45f9a760515fb399945c85fb6be)) - -* code cleanup, added docstring & comments to code blocks ([`e64dafe`](https://github.com/datajoint/element-array-ephys/commit/e64dafedd53de6ebf2243d6049982738f0e8d56b)) - -* Update spikeglx.py ([`238a511`](https://github.com/datajoint/element-array-ephys/commit/238a511d0030299b650868c78de05e428739a3e0)) - -* bugfix in waveform extraction ([`60e320d`](https://github.com/datajoint/element-array-ephys/commit/60e320d7973490bf3ae77ec0b6c9b86addbab921)) - -* added comment ([`be82f4e`](https://github.com/datajoint/element-array-ephys/commit/be82f4e9a5a262f73c427b5996bf7b3778e105ba)) - -* minor code cleanup ([`8aa11e2`](https://github.com/datajoint/element-array-ephys/commit/8aa11e231140f81fc10347f08a9f46e8c1e345b3)) - -* extract and apply bit-volts conversion for spikeglx loader ([`b5c11f0`](https://github.com/datajoint/element-array-ephys/commit/b5c11f04ae9b7b33fe93efd24fb292090e683d89)) - -* apply channels' gain for the data ([`8ceeb0b`](https://github.com/datajoint/element-array-ephys/commit/8ceeb0b8daea0f4d6d3c1aadf28930b50ae9fec9)) 
- -* remove `used_in_reference` in ElectrodeConfig - -this is misleading as it's only relevant for SpikeGLX acquisition for denoting channel visualization ([`847eeba`](https://github.com/datajoint/element-array-ephys/commit/847eeba4263c5a050ca7ffafa0cd4e891e099b21)) - -* bugfix in waveform extraction for OpenEphys ([`281e37b`](https://github.com/datajoint/element-array-ephys/commit/281e37b8c4c2da28fb7c94525be0db1b8eb495d4)) - -* bugfix in waveform ingestion ([`3452ab7`](https://github.com/datajoint/element-array-ephys/commit/3452ab721f0dc2022d1aaae0cb919e97cc25a8f8)) - -* code cleanup ([`3784238`](https://github.com/datajoint/element-array-ephys/commit/3784238c6ae8ed3b2c556544c054c3ca15e59e86)) - -* waveform ingestion for OpenEphys ([`1d02cf5`](https://github.com/datajoint/element-array-ephys/commit/1d02cf57ea04ced7a5b8062873069d5b4c473c72)) - -* extract_spike_waveforms() for OpenEphys ([`2d6f22c`](https://github.com/datajoint/element-array-ephys/commit/2d6f22c0a78a0ef7c617322fcc4658c045341ee1)) - -* implement "probe" in OpenEphys as a standalone class ([`045344d`](https://github.com/datajoint/element-array-ephys/commit/045344dc6ac38bf6482208065f95ff0b28aeedb9)) - -* minor bugfix in channel mapping/fetching ([`631837d`](https://github.com/datajoint/element-array-ephys/commit/631837d4e4f1c52a45105ee1817f397221a304cd)) - -* Update spikeglx.py ([`af2831c`](https://github.com/datajoint/element-array-ephys/commit/af2831ce1e4e278315644a2a7e5aab29fa495131)) - -* minor naming bugfix ([`e9d60d7`](https://github.com/datajoint/element-array-ephys/commit/e9d60d7088cc54b29c2b13ec5c5886fd77e5004a)) - -* rename `neuropixels` -> `spikeglx` ([`07982dc`](https://github.com/datajoint/element-array-ephys/commit/07982dc934a1103bdca1369da621cec393b26eea)) - -* LFP ingestion for OpenEphys ([`75149b3`](https://github.com/datajoint/element-array-ephys/commit/75149b3a9a6f3eed51977398ef037fbfe5de27ca)) - -* EphysRecording's `make()` handles OpenEphys 
([`f784f12`](https://github.com/datajoint/element-array-ephys/commit/f784f12373eed355f5c45d16796ba9363abc75be)) - -* Update probe.py ([`628c7f0`](https://github.com/datajoint/element-array-ephys/commit/628c7f06bf4764f25c0f9113474e4cb1739e3f01)) - -* update ephys ingestion routine, refactor electrode config generation ([`2750aa9`](https://github.com/datajoint/element-array-ephys/commit/2750aa98b861d6426d7ee9335db7c81412f4ace0)) - -* openephys loader, using pyopenephys pkg ([`5540bbe`](https://github.com/datajoint/element-array-ephys/commit/5540bbe9a09fc5bc287f5973eff00f3766b9e8c3)) - -* Update neuropixels.py ([`eba6b8c`](https://github.com/datajoint/element-array-ephys/commit/eba6b8c1303fee1541b19f3ad72a4a88e54a18b3)) - -* openephys loader, using `open_ephys` pkg ([`a2ba6d6`](https://github.com/datajoint/element-array-ephys/commit/a2ba6d63753e9e302427728df6c23d74a45370a6)) - -* Update LICENSE ([`e29180f`](https://github.com/datajoint/element-array-ephys/commit/e29180fac4da203b47540c8f358bc489ba341993)) - -* Update openephys.py ([`2545772`](https://github.com/datajoint/element-array-ephys/commit/25457726e2faa3a8748ec7410e0e7a6b708b8cbc)) - -* `ClusteringTask` as manual table with user specified paramset_idx and clustering_output_dir ([`6850702`](https://github.com/datajoint/element-array-ephys/commit/6850702d2be133942391597c77805555fcca4216)) - -* Merge branch 'main' into OpenEphys-support ([`7d827a1`](https://github.com/datajoint/element-array-ephys/commit/7d827a11b8cea2952bb0a4d44b6285f1bd052ad9)) - -* infer/store ephys-recording directory, based on `session_dir` ([`38927c2`](https://github.com/datajoint/element-array-ephys/commit/38927c242c0a92323fb4080dac463b7e3ab3c693)) - -* Merge branch 'main' of https://github.com/datajoint/elements-ephys into main ([`8a16bf2`](https://github.com/datajoint/element-array-ephys/commit/8a16bf21ea6b6213b38fff0af7328a44195f2040)) - -* added AcquisitionSoftware 
([`7de2127`](https://github.com/datajoint/element-array-ephys/commit/7de2127e0601c7817cf77fd993f6402729840ca5)) - -* minor bugfix: `probe.schema.activate` -> `probe.activate` ([`e278573`](https://github.com/datajoint/element-array-ephys/commit/e278573cd5c250ba9801ec2432f09e647ecb2428)) - -* Create open_ephys.py ([`a28c2da`](https://github.com/datajoint/element-array-ephys/commit/a28c2dac483c4f3366b185dfbd47b4c28c1f4e04)) - -* Merge pull request #7 from ttngu207/main - -update docstring for function `activate` ([`8893fc8`](https://github.com/datajoint/element-array-ephys/commit/8893fc800bfb224e28013d1475c19f59c669ea8d)) - -* update wording, `required_module` -> `linking_module` ([`071bf35`](https://github.com/datajoint/element-array-ephys/commit/071bf353e4376623298826af3187e0fc6c3837fa)) - -* update docstring for function `activate` ([`f11900f`](https://github.com/datajoint/element-array-ephys/commit/f11900f19d6735c0fd4bb4420a05d03670fd6b4e)) - -* Merge pull request #6 from ttngu207/main - -implement new "activation" mechanism -> using dict, module name or module for `requirement` ([`ec58e20`](https://github.com/datajoint/element-array-ephys/commit/ec58e20962689f2d87373209acd4bf07178bfeec)) - -* simplify "activate" no explicit requirements check ([`aa4064c`](https://github.com/datajoint/element-array-ephys/commit/aa4064cd22704fdb69b83ce6d793c1ba307b1a3a)) - -* minor format cleanup ([`822c5b7`](https://github.com/datajoint/element-array-ephys/commit/822c5b742e74ff36861ddd8a652b9bdd48bd03d8)) - -* implement new "activation" mechanism -> using dict, module name or module as 'requirement' ([`c9e7f1e`](https://github.com/datajoint/element-array-ephys/commit/c9e7f1e0b1cfbc77c6f7cffbb93b1bafbeeed731)) - -* bugfix in `paramset_name` -> `paramset_idx` ([`852f5a4`](https://github.com/datajoint/element-array-ephys/commit/852f5a471f0ed0703c774716c46f5484854e9e57)) - -* Merge pull request #5 from ttngu207/main - -minor tweak using `schema.database`, awaiting 
`schema.is_activated` ([`e9191dd`](https://github.com/datajoint/element-array-ephys/commit/e9191dd6c9c225874aa046ea75c0ea0acc581c17)) - -* minor tweak using `schema.database`, awaiting `schema.is_activated` ([`d606233`](https://github.com/datajoint/element-array-ephys/commit/d606233e80ab3c289c23d852977147823f8e09dc)) - -* Merge pull request #4 from dimitri-yatsenko/main - -ephys.activate inserts required functions into the module namespace ([`dadda0d`](https://github.com/datajoint/element-array-ephys/commit/dadda0d19ecc0afb7043e8ad888d918dacca0378)) - -* ephys.activate inserts required functions into the module namespace ([`1f732d3`](https://github.com/datajoint/element-array-ephys/commit/1f732d39ea11d03f88147fef645f29abc59eede5)) - -* Merge branch 'main' of https://github.com/datajoint/elements-ephys into main ([`3db8461`](https://github.com/datajoint/element-array-ephys/commit/3db84614d914863c9ec76752009bf8450f9439e5)) - -* Merge pull request #3 from ttngu207/main - -code cleanup, bug fix, tested ([`1cc5119`](https://github.com/datajoint/element-array-ephys/commit/1cc51196433e8ff6046f54cbd99162d0a7ed857b)) - -* code cleanup, bug fix, tested ([`11d2aec`](https://github.com/datajoint/element-array-ephys/commit/11d2aec8bbaf9bf2c396d86a7f6e513607476dbf)) - -* Merge pull request #2 from dimitri-yatsenko/main - -Refactor to use schema.activate from DataJoint 0.13 ([`5834b4a`](https://github.com/datajoint/element-array-ephys/commit/5834b4a75e38ffcd6dd1b8333f88300e0f2124cc)) - -* fix imported class names in the ephys module ([`b822e63`](https://github.com/datajoint/element-array-ephys/commit/b822e6313d0d73a2a4d7d26706d1ae712ad806ae)) - -* minor cleanup ([`5edc3ce`](https://github.com/datajoint/element-array-ephys/commit/5edc3ced1129c8807a422876b3da261c2f1d6c11)) - -* update to comply with datajoint 0.13 deferred schema use ([`a925450`](https://github.com/datajoint/element-array-ephys/commit/a925450db74e102dfd6a66dc55e53629a0d41765)) - -* Merge pull request #1 from 
ttngu207/main - -moved from `canonical-ephys` ([`d1decf2`](https://github.com/datajoint/element-array-ephys/commit/d1decf2ac4c5e021c6c63a554052ed72ee9a1379)) - -* moved from `canonical-ephys` ([`55f7717`](https://github.com/datajoint/element-array-ephys/commit/55f771729d06cd9a8346d4ed0882bd51ae603489)) - -* Create README.md ([`0896c85`](https://github.com/datajoint/element-array-ephys/commit/0896c85193a93550e19775c7c4b02b1fa5f7742f)) +[0.3.6]: https://github.com/datajoint/element-array-ephys/releases/tag/0.3.6 +[0.3.5]: https://github.com/datajoint/element-array-ephys/releases/tag/0.3.5 +[0.3.1]: https://github.com/datajoint/element-array-ephys/releases/tag/0.3.1 +[0.2.11]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.11 +[0.2.10]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.10 +[0.2.9]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.9 +[0.2.8]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.8 +[0.2.7]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.7 +[0.2.6]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.6 +[0.2.5]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.5 +[0.2.4]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.4 +[0.2.3]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.3 +[0.2.2]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.2 +[0.2.1]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.1 +[0.2.0]: https://github.com/datajoint/element-array-ephys/releases/tag/0.2.0 +[0.1.4]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.4 +[0.1.3]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.3 +[0.1.2]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.2 +[0.1.1]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.1 +[0.1.0]: 
https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0 +[0.1.0b4]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0b4 +[0.1.0b0]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0b0 +[0.1.0a5]: https://github.com/datajoint/element-array-ephys/releases/tag/0.1.0a5 diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 6b3c3b3f..39ba565b 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.3.7" +__version__ = "0.3.8" From 0fcb8c0075ecc660df75ff850c5ddf6bc783a345 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Thu, 16 Jan 2025 16:04:05 +0000 Subject: [PATCH 161/204] chore: typo --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1199cfc2..4a6570c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,4 @@ -# Release notes +# Changelog Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
From 77d001dc2d5e29f9f962857cbbab384cb23303cd Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Thu, 16 Jan 2025 17:50:29 +0000 Subject: [PATCH 162/204] fix: update changelog and semantic release --- .github/workflows/semantic-release-caller.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .github/workflows/semantic-release-caller.yml diff --git a/.github/workflows/semantic-release-caller.yml b/.github/workflows/semantic-release-caller.yml new file mode 100644 index 00000000..bfd7643d --- /dev/null +++ b/.github/workflows/semantic-release-caller.yml @@ -0,0 +1,10 @@ +name: semantic-release +on: + workflow_dispatch: + +jobs: + call_semantic_release: + uses: datajoint/.github/.github/workflows/semantic-release.yaml@main + secrets: + APP_ID: ${{ secrets.ELEMENT_APP_ID }} + GET_TOKEN_KEY: ${{ secrets.ELEMENT_GET_TOKEN_KEY }} From e4d959d74355a159bbe81589fc1ff799f7d85b6e Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 24 Jan 2025 16:02:33 -0600 Subject: [PATCH 163/204] update: version 0.4.0 --- CHANGELOG.md | 14 +++++++++++--- element_array_ephys/version.py | 2 +- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4a6570c6..d7c03499 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,15 +3,23 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. -## [v0.3.8] - 2025-01-16 +## [0.4.0] - 2025-01-24 + ++ Update - No longer support multiple variation of ephys module, keep only `ephys_no_curation` module, renamed to `ephys` ++ Update - Remove other ephys modules (e.g. 
`ephys_acute`, `ephys_chronic`) (moved to different branches) ++ Update - Add support for `SpikeInterface` ++ Update - Remove support for `ecephys_spike_sorting` (moved to a different branch) ++ Update - Simplify the "activate" mechanism + +## [0.3.8] - 2025-01-16 * Change - Revert GHA Semantic Release caller and update changelog -## [v0.3.7] - 2024-11-01 +## [0.3.7] - 2024-11-01 * Fix - robust IMAX value detection from IMEC file (metadata 3.0) -## [v0.3.6] - 2024-10-01 +## [0.3.6] - 2024-10-01 * Fix - Minor bugfix in reading npx probe model of older versions and in reaeding probe model * Add - New GitHub Action callers for devcontainer, mkdocs, and semantic release diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 39ba565b..2e6de55a 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.3.8" +__version__ = "0.4.0" From d4e08ee2612920d3b183b3de6488c7b94a59de4d Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Tue, 28 Jan 2025 17:59:28 +0100 Subject: [PATCH 164/204] fix: fix release titles in Changelog --- CHANGELOG.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4a6570c6..810a1ca1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,15 +3,15 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
-## [v0.3.8] - 2025-01-16 +## [0.3.8] - 2025-01-16 -* Change - Revert GHA Semantic Release caller and update changelog +* Fix - Revert GHA Semantic Release caller and update changelog -## [v0.3.7] - 2024-11-01 +## [0.3.7] - 2024-11-01 * Fix - robust IMAX value detection from IMEC file (metadata 3.0) -## [v0.3.6] - 2024-10-01 +## [0.3.6] - 2024-10-01 * Fix - Minor bugfix in reading npx probe model of older versions and in reaeding probe model * Add - New GitHub Action callers for devcontainer, mkdocs, and semantic release @@ -24,7 +24,7 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and ## [0.3.4] - 2024-03-22 * Add - pytest -* Update - Ephys schema changed from `ephys_acute` to `ephys_no_curation` in `tutorial.ipynb` +* Fix - Fix regex patterns and add minimum version for scikit-image ## [0.3.3] - 2024-01-24 From 7dee0198974d6016207ece434967696658ac3925 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 Jan 2025 12:32:45 -0600 Subject: [PATCH 165/204] update: version 0.4.0 --- docs/src/concepts.md | 2 +- docs/src/index.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/concepts.md b/docs/src/concepts.md index cb57a802..b5da5081 100644 --- a/docs/src/concepts.md +++ b/docs/src/concepts.md @@ -68,7 +68,7 @@ existing spike sorting methods, named by Alessio Buccino, et al. SpikeInterface provides a convenient Python-based wrapper to invoke, extract, compare spike sorting results from different sorting algorithms. SpikeInterface is the primary tool supported by Element Array Electrophysiology for -spike sorting as of version `1.0.0`. +spike sorting as of version `0.4.0`. ## Key Partnerships diff --git a/docs/src/index.md b/docs/src/index.md index 0c828c00..5d9b7f19 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -13,7 +13,7 @@ The Element is comprised of `probe` and `ephys` schemas. Visit the `ephys` schemas and an explanation of the tables. 
To get started with building your own data pipeline, visit the [Tutorials page](./tutorials/index.md). -Prior to version `1.0.0` , several `ephys` schemas were +Prior to version `0.4.0` , several `ephys` schemas were developed and supported to handle various use cases of this pipeline and workflow. These are now deprecated but still available on their own branch within the repository: From ba258122842bcd06884533ad2d767c03fddef4cb Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 Jan 2025 12:38:44 -0600 Subject: [PATCH 166/204] style: black format --- element_array_ephys/ephys.py | 13 ++++++++++--- .../spike_sorting/si_spike_sorting.py | 8 ++++++-- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index a49b3f6e..efa377bf 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -1034,7 +1034,9 @@ def make(self, key): sorting_file, base_folder=output_dir ) if si_sorting_.unit_ids.size == 0: - logger.info(f"No units found in {sorting_file}. Skipping Unit ingestion...") + logger.info( + f"No units found in {sorting_file}. Skipping Unit ingestion..." + ) self.insert1(key) return @@ -1247,7 +1249,9 @@ def make(self, key): self.insert1(key) if not len(CuratedClustering.Unit & key): - logger.info(f"No CuratedClustering.Unit found for {key}, skipping Waveform ingestion.") + logger.info( + f"No CuratedClustering.Unit found for {key}, skipping Waveform ingestion." 
+ ) return # Get channel and electrode-site mapping @@ -1303,6 +1307,7 @@ def yield_unit_waveforms(): ] yield unit_peak_waveform, unit_electrode_waveforms + else: # read from kilosort outputs (ecephys pipeline) kilosort_dataset = kilosort.Kilosort(output_dir) @@ -1510,7 +1515,9 @@ def make(self, key): self.insert1(key) if not len(CuratedClustering.Unit & key): - logger.info(f"No CuratedClustering.Unit found for {key}, skipping QualityMetrics ingestion.") + logger.info( + f"No CuratedClustering.Unit found for {key}, skipping QualityMetrics ingestion." + ) return si_sorting_analyzer_dir = output_dir / sorter_name / "sorting_analyzer" diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index b4d17a27..e3e797b6 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -290,7 +290,9 @@ def make(self, key): def _sorting_analyzer_compute(): if not has_units: log.info("No units found in sorting object. 
Skipping sorting analyzer.") - analyzer_output_dir.mkdir(parents=True, exist_ok=True) # create empty directory anyway, for consistency + analyzer_output_dir.mkdir( + parents=True, exist_ok=True + ) # create empty directory anyway, for consistency return # Sorting Analyzer @@ -316,7 +318,9 @@ def _sorting_analyzer_compute(): _sorting_analyzer_compute() - do_si_export = postprocessing_params.get("export_to_phy", False) or postprocessing_params.get("export_report", False) + do_si_export = postprocessing_params.get( + "export_to_phy", False + ) or postprocessing_params.get("export_report", False) self.insert1( { From 0ee86d2bed492ed686e510dbadb8b76d438f4403 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 28 Jan 2025 12:43:06 -0600 Subject: [PATCH 167/204] update(GHA): remove old python versions --- .github/workflows/test.yaml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 19c91e36..a463737f 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -15,12 +15,7 @@ jobs: strategy: matrix: py_ver: ["3.9", "3.10"] - mysql_ver: ["8.0", "5.7"] - include: - - py_ver: "3.8" - mysql_ver: "5.7" - - py_ver: "3.7" - mysql_ver: "5.7" + mysql_ver: ["8.0"] steps: - uses: actions/checkout@v3 - name: Set up Python ${{matrix.py_ver}} From 682ed1717821c047f443e2af6f7eb10f8200678a Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 13 Feb 2025 15:45:21 -0600 Subject: [PATCH 168/204] fix(ephys): skip_duplicates in Probe.insert - allow probe reuse --- element_array_ephys/ephys.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/element_array_ephys/ephys.py b/element_array_ephys/ephys.py index efa377bf..60ffa9b5 100644 --- a/element_array_ephys/ephys.py +++ b/element_array_ephys/ephys.py @@ -213,7 +213,7 @@ def auto_generate_entries(cls, session_key): else: raise NotImplementedError(f"Unknown acquisition software: {acq_software}") - 
probe.Probe.insert(probe_list) + probe.Probe.insert(probe_list, skip_duplicates=True) cls.insert(probe_insertion_list, skip_duplicates=True) From 4c0bfc9c5477bd3a22271576a315eecceb430035 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Thu, 13 Feb 2025 16:51:56 -0600 Subject: [PATCH 169/204] update: version + changelog --- CHANGELOG.md | 4 ++++ element_array_ephys/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d136a29..628a7742 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,10 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. +## [0.4.1] - 2025-02-13 + ++ Fix - Auto generate ProbeInsertion allows duplicate probes (e.g. probe reuse) + ## [0.4.0] - 2025-01-28 + Update - No longer support multiple variation of ephys module, keep only `ephys_no_curation` module, renamed to `ephys` diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 2e6de55a..0f62035b 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.4.0" +__version__ = "0.4.1" From ceeceeeb5549f9ae83086dca7cb2f61242593ba2 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Fri, 14 Feb 2025 15:12:17 +0000 Subject: [PATCH 170/204] fix: update element's test flow --- .github/workflows/test.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index a463737f..36199ef3 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -8,8 +8,6 @@ on: - main workflow_dispatch: jobs: - devcontainer-build: - uses: datajoint/.github/.github/workflows/devcontainer-build.yaml@main tests: runs-on: ubuntu-latest strategy: @@ -30,3 +28,4 @@ jobs: run: | python_version=${{matrix.py_ver}} black element_array_ephys --check --verbose --target-version 
py${python_version//.} + black notebooks --check --verbose --target-version py${python_version//.} From b4f0e2358cad2ab02ef17fa1eb78051bd2b5abc3 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Fri, 14 Feb 2025 15:44:51 +0000 Subject: [PATCH 171/204] black formatting --- notebooks/tutorial.ipynb | 224 ++++++++++++++++++++------------------- 1 file changed, 115 insertions(+), 109 deletions(-) diff --git a/notebooks/tutorial.ipynb b/notebooks/tutorial.ipynb index 19b7b615..09d41f44 100644 --- a/notebooks/tutorial.ipynb +++ b/notebooks/tutorial.ipynb @@ -9,52 +9,53 @@ "\n", "#### Open-source data pipeline for processing and analyzing extracellular electrophysiology datasets.\n", "\n", - "Welcome to the tutorial for the DataJoint Element for extracellular array electrophysiology. This\n", - "tutorial aims to provide a comprehensive understanding of the open-source data pipeline\n", - "created using `element-array-ephys`.\n", + "Welcome to the tutorial for the DataJoint Element for extracellular array\n", + "electrophysiology. This tutorial aims to provide a comprehensive understanding of the\n", + "open-source data pipeline created using `element-array-ephys`.\n", "\n", - "This package is designed to seamlessly process, ingest, and track extracellular electrophysiology\n", - "data, along with its associated probe and recording metadata. By the end of this\n", - "tutorial you will have a clear grasp on setting up and integrating `element-array-ephys`\n", - "into your specific research projects and lab. \n", + "This package is designed to seamlessly process, ingest, and track extracellular\n", + "electrophysiology data, along with its associated probe and recording metadata. 
By the\n", + "end of this tutorial you will have a clear grasp on setting up and integrating\n", + "`element-array-ephys` into your specific research projects and lab.\n", "\n", "![flowchart](../images/diagram_flowchart.svg)\n", "\n", "### Prerequisites\n", "\n", - "Please see the [datajoint tutorials GitHub\n", - "repository](https://github.com/datajoint/datajoint-tutorials/tree/main) before\n", - "proceeding.\n", + "Please see the\n", + "[datajoint tutorials GitHub repository](https://github.com/datajoint/datajoint-tutorials/tree/main)\n", + "before proceeding.\n", "\n", "A basic understanding of the following DataJoint concepts will be beneficial to your\n", - "understanding of this tutorial: \n", + "understanding of this tutorial:\n", + "\n", "1. The `Imported` and `Computed` tables types in `datajoint-python`.\n", - "2. The functionality of the `.populate()` method. \n", + "2. The functionality of the `.populate()` method.\n", "\n", "#### **Tutorial Overview**\n", "\n", - "+ Setup\n", - "+ *Activate* the DataJoint pipeline.\n", - "+ *Insert* subject, session, and probe metadata.\n", - "+ *Populate* electrophysiology recording metadata.\n", - "+ Run the clustering task.\n", - "+ Curate the results (optional).\n", - "+ Visualize the results.\n", + "- Setup\n", + "- _Activate_ the DataJoint pipeline.\n", + "- _Insert_ subject, session, and probe metadata.\n", + "- _Populate_ electrophysiology recording metadata.\n", + "- Run the clustering task.\n", + "- Curate the results (optional).\n", + "- Visualize the results.\n", "\n", "### **Setup**\n", "\n", "This tutorial examines extracellular electrophysiology data acquired with `OpenEphys`\n", - "and spike-sorted using Kilosort 2.5. The goal is to store, track\n", - "and manage sessions of array electrophysiology data, including spike sorting results and\n", - "unit-level visualizations. \n", + "and spike-sorted using Kilosort 2.5. 
The goal is to store, track and manage sessions of\n", + "array electrophysiology data, including spike sorting results and unit-level\n", + "visualizations.\n", "\n", - "The results of this Element can be combined with **other modalities** to create\n", - "a complete, customizable data pipeline for your specific lab or study. For instance, you\n", + "The results of this Element can be combined with **other modalities** to create a\n", + "complete, customizable data pipeline for your specific lab or study. For instance, you\n", "can combine `element-array-ephys` with `element-calcium-imaging` and\n", "`element-deeplabcut` to characterize the neural activity along with markless\n", "pose-estimation during behavior.\n", "\n", - "Let's start this tutorial by importing the packages necessary to run the notebook." + "Let's start this tutorial by importing the packages necessary to run the notebook.\n" ] }, { @@ -75,7 +76,7 @@ "source": [ "If the tutorial is run in Codespaces, a private, local database server is created and\n", "made available for you. This is where we will insert and store our processed results.\n", - "Let's connect to the database server." + "Let's connect to the database server.\n" ] }, { @@ -113,8 +114,8 @@ "### **Activate the DataJoint Pipeline**\n", "\n", "This tutorial activates the `ephys_acute.py` module from `element-array-ephys`, along\n", - "with upstream dependencies from `element-animal` and `element-session`. Please refer to the\n", - "[`tutorial_pipeline.py`](./tutorial_pipeline.py) for the source code." + "with upstream dependencies from `element-animal` and `element-session`. Please refer to\n", + "the [`tutorial_pipeline.py`](./tutorial_pipeline.py) for the source code.\n" ] }, { @@ -131,7 +132,7 @@ "metadata": {}, "source": [ "We can represent the tables in the `probe` and `ephys` schemas as well as some of the\n", - "upstream dependencies to `session` and `subject` schemas as a diagram." 
+ "upstream dependencies to `session` and `subject` schemas as a diagram.\n" ] }, { @@ -590,16 +591,18 @@ "metadata": {}, "source": [ "As evident from the diagram, this data pipeline encompasses tables associated with\n", - "recording and probe metadata, results of clustering. A few tables, such as `subject.Subject` or `session.Session`,\n", - "while important for a complete pipeline, fall outside the scope of the `element-array-ephys`\n", - "tutorial, and will therefore, not be explored extensively here. The primary focus of\n", - "this tutorial will be on the `probe` and `ephys` schemas.\n", + "recording and probe metadata, results of clustering. A few tables, such as\n", + "`subject.Subject` or `session.Session`, while important for a complete pipeline, fall\n", + "outside the scope of the `element-array-ephys` tutorial, and will therefore, not be\n", + "explored extensively here. The primary focus of this tutorial will be on the `probe` and\n", + "`ephys` schemas.\n", "\n", "### **Insert subject, session, and probe metadata**\n", "\n", "Let's start with the first table in the schema diagram (i.e. `subject.Subject` table).\n", "\n", - "To know what data to insert into the table, we can view its dependencies and attributes using the `.describe()` and `.heading` methods." + "To know what data to insert into the table, we can view its dependencies and attributes\n", + "using the `.describe()` and `.heading` methods.\n" ] }, { @@ -757,9 +760,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The cells above show all attributes of the subject table.\n", - "We will insert data into the\n", - "`subject.Subject` table. " + "The cells above show all attributes of the subject table. We will insert data into the\n", + "`subject.Subject` table.\n" ] }, { @@ -874,8 +876,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Let's repeat the steps above for the `Session` table and see how the output varies between\n", - "`.describe` and `.heading`." 
+ "Let's repeat the steps above for the `Session` table and see how the output varies\n", + "between `.describe` and `.heading`.\n" ] }, { @@ -928,7 +930,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Notice that `describe`, displays the table's structure and highlights its dependencies, such as its reliance on the `Subject` table. These dependencies represent foreign key references, linking data across tables.\n", + "Notice that `describe`, displays the table's structure and highlights its dependencies,\n", + "such as its reliance on the `Subject` table. These dependencies represent foreign key\n", + "references, linking data across tables.\n", "\n", "On the other hand, `heading` provides an exhaustive list of the table's attributes. This\n", "list includes both the attributes declared in this table and any inherited from upstream\n", @@ -936,7 +940,8 @@ "\n", "With this understanding, let's move on to insert a session associated with our subject.\n", "\n", - "We will insert into the `session.Session` table by passing a dictionary to the `insert1` method." + "We will insert into the `session.Session` table by passing a dictionary to the `insert1`\n", + "method.\n" ] }, { @@ -1050,7 +1055,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Every experimental session produces a set of data files. The purpose of the `SessionDirectory` table is to locate these files. It references a directory path relative to a root directory, defined in `dj.config[\"custom\"]`. More information about `dj.config` is provided in the [documentation](https://datajoint.com/docs/elements/user-guide/)." + "Every experimental session produces a set of data files. The purpose of the\n", + "`SessionDirectory` table is to locate these files. It references a directory path\n", + "relative to a root directory, defined in `dj.config[\"custom\"]`. 
More information about\n", + "`dj.config` is provided in the\n", + "[documentation](https://datajoint.com/docs/elements/user-guide/).\n" ] }, { @@ -1158,10 +1167,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "As the Diagram indicates, the tables in the `probe` schemas need to\n", - "contain data before the tables in the `ephys` schema accept any data. Let's\n", - "start by inserting into `probe.Probe`, a table containing metadata about a\n", - "multielectrode probe. " + "As the Diagram indicates, the tables in the `probe` schemas need to contain data before\n", + "the tables in the `ephys` schema accept any data. Let's start by inserting into\n", + "`probe.Probe`, a table containing metadata about a multielectrode probe.\n" ] }, { @@ -1269,8 +1277,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The probe metadata is used by the downstream `ProbeInsertion` table which we\n", - "insert data into in the cells below:" + "The probe metadata is used by the downstream `ProbeInsertion` table which we insert data\n", + "into in the cells below:\n" ] }, { @@ -1432,7 +1440,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Confirm the inserted data:" + "Confirm the inserted data:\n" ] }, { @@ -1543,7 +1551,8 @@ "### **Populate electrophysiology recording metadata**\n", "\n", "In the upcoming cells, the `.populate()` method will automatically extract and store the\n", - "recording metadata for each experimental session in the `ephys.EphysRecording` table and its part table `ephys.EphysRecording.EphysFile`." + "recording metadata for each experimental session in the `ephys.EphysRecording` table and\n", + "its part table `ephys.EphysRecording.EphysFile`.\n" ] }, { @@ -1776,7 +1785,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Let's view the information was entered into each of these tables:" + "Let's view the information was entered into each of these tables:\n" ] }, { @@ -2010,7 +2019,7 @@ "`ClusteringParamSet`. 
This table keeps track of all combinations of your spike sorting\n", "parameters. You can choose which parameters are used during processing in a later step.\n", "\n", - "Let's view the attributes and insert data into `ephys.ClusteringParamSet`." + "Let's view the attributes and insert data into `ephys.ClusteringParamSet`.\n" ] }, { @@ -2180,11 +2189,11 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "DataJoint uses a `ClusteringTask` table to\n", - "manage which `EphysRecording` and `ClusteringParamSet` should be used during processing. \n", + "DataJoint uses a `ClusteringTask` table to manage which `EphysRecording` and\n", + "`ClusteringParamSet` should be used during processing.\n", "\n", - "This table is important for defining several important aspects of\n", - "downstream processing. Let's view the attributes to get a better understanding. " + "This table is important for defining several important aspects of downstream processing.\n", + "Let's view the attributes to get a better understanding.\n" ] }, { @@ -2219,13 +2228,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The `ClusteringTask` table contains two important attributes: \n", - "+ `paramset_idx` - Allows the user to choose the parameter set with which you want to\n", + "The `ClusteringTask` table contains two important attributes:\n", + "\n", + "- `paramset_idx` - Allows the user to choose the parameter set with which you want to\n", " run spike sorting.\n", - "+ `task_mode` - Can be set to `load` or `trigger`. When set to `load`, running the\n", + "- `task_mode` - Can be set to `load` or `trigger`. When set to `load`, running the\n", " Clustering step initiates a search for existing output files of the spike sorting\n", " algorithm defined in `ClusteringParamSet`. When set to `trigger`, the processing step\n", - " will run spike sorting on the raw data." 
+ " will run spike sorting on the raw data.\n" ] }, { @@ -2249,7 +2259,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Let's call populate on the `Clustering` table which checks for kilosort results since `task_mode=load`." + "Let's call populate on the `Clustering` table which checks for kilosort results since\n", + "`task_mode=load`.\n" ] }, { @@ -2275,8 +2286,8 @@ "source": [ "### **Populate the results**\n", "\n", - "Once the `Clustering` table receives an entry, we can populate the remaining\n", - "tables in the workflow including `CuratedClustering`, `WaveformSet`, and `LFP`. " + "Once the `Clustering` table receives an entry, we can populate the remaining tables in\n", + "the workflow including `CuratedClustering`, `WaveformSet`, and `LFP`.\n" ] }, { @@ -2307,25 +2318,25 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Now that we've populated the tables in this DataJoint pipeline, there are one of\n", - "several next steps. If you have an existing pipeline for\n", - "aligning waveforms to behavior data or other stimuli, you can easily\n", - "invoke `element-event` or define your custom DataJoint tables to extend the\n", - "pipeline." + "Now that we've populated the tables in this DataJoint pipeline, there are one of several\n", + "next steps. If you have an existing pipeline for aligning waveforms to behavior data or\n", + "other stimuli, you can easily invoke `element-event` or define your custom DataJoint\n", + "tables to extend the pipeline.\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "### **Visualize the results**" + "### **Visualize the results**\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "In this tutorial, we will do some exploratory analysis by fetching the data from the database and creating a few plots." 
+ "In this tutorial, we will do some exploratory analysis by fetching the data from the\n", + "database and creating a few plots.\n" ] }, { @@ -2342,10 +2353,10 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In the query above, we fetch a single `lfp_mean` attribute from the `LFP` table.\n", - "We also restrict the query to insertion number 1.\n", + "In the query above, we fetch a single `lfp_mean` attribute from the `LFP` table. We also\n", + "restrict the query to insertion number 1.\n", "\n", - "Let's go ahead and plot the LFP mean." + "Let's go ahead and plot the LFP mean.\n" ] }, { @@ -2377,15 +2388,16 @@ "metadata": {}, "source": [ "DataJoint queries are a highly flexible tool to manipulate and visualize your data.\n", - "After all, visualizing traces or generating rasters is likely just the start of\n", - "your analysis workflow. This can also make the queries seem more complex at\n", - "first. However, we'll walk through them slowly to simplify their content in this notebook. \n", + "After all, visualizing traces or generating rasters is likely just the start of your\n", + "analysis workflow. This can also make the queries seem more complex at first. 
However,\n", + "we'll walk through them slowly to simplify their content in this notebook.\n", "\n", "The examples below perform several operations using DataJoint queries:\n", + "\n", "- Fetch the primary key attributes of all units that are in `insertion_number=1`.\n", "- Use **multiple restrictions** to fetch timestamps and create a raster plot.\n", - "- Use a **join** operation and **multiple restrictions** to fetch a waveform\n", - " trace, along with unit data to create a single waveform plot" + "- Use a **join** operation and **multiple restrictions** to fetch a waveform trace,\n", + " along with unit data to create a single waveform plot\n" ] }, { @@ -2430,8 +2442,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Below we will use two queries to fetch *all* of the information about a single unit and\n", - "plot the unit waveform." + "Below we will use two queries to fetch _all_ of the information about a single unit and\n", + "plot the unit waveform.\n" ] }, { @@ -2694,33 +2706,32 @@ "source": [ "## Summary\n", "\n", - "Following this tutorial, we have: \n", - "+ Covered the essential functionality of `element-array-ephys`.\n", - "+ Learned how to manually insert data into tables.\n", - "+ Executed and ingested results of spike sorting with Kilosort.\n", - "+ Visualized the results. 
\n", + "Following this tutorial, we have:\n", + "\n", + "- Covered the essential functionality of `element-array-ephys`.\n", + "- Learned how to manually insert data into tables.\n", + "- Executed and ingested results of spike sorting with Kilosort.\n", + "- Visualized the results.\n", "\n", "#### Documentation and DataJoint Tutorials\n", "\n", - "+ [Detailed documentation on\n", - " `element-array-ephys`.](https://datajoint.com/docs/elements/element-array-ephys/)\n", - "+ [General `datajoint-python`\n", - " tutorials.](https://github.com/datajoint/datajoint-tutorials) covering fundamentals,\n", - " such as table tiers, query operations, fetch operations, automated computations with the\n", - " make function, and more.\n", - "+ [Documentation for\n", - " `datajoint-python`.](https://datajoint.com/docs/core/datajoint-python/)\n", + "- [Detailed documentation on `element-array-ephys`.](https://datajoint.com/docs/elements/element-array-ephys/)\n", + "- [General `datajoint-python` tutorials.](https://github.com/datajoint/datajoint-tutorials)\n", + " covering fundamentals, such as table tiers, query operations, fetch operations,\n", + " automated computations with the make function, and more.\n", + "- [Documentation for `datajoint-python`.](https://datajoint.com/docs/core/datajoint-python/)\n", "\n", "##### Run this tutorial on your own data\n", "\n", "To run this tutorial notebook on your own data, please use the following steps:\n", - "+ Download the [mysql-docker image for\n", - " DataJoint](https://github.com/datajoint/mysql-docker) and run the container according\n", - " to the instructions provide in the repository.\n", - "+ Create a fork of this repository to your GitHub account.\n", - "+ Clone the repository and open the files using your IDE.\n", - "+ Add a code cell immediately after the first code cell in the notebook - we will setup\n", - " the local connection using this cell. In this cell, type in the following code. 
\n", + "\n", + "- Download the\n", + " [mysql-docker image for DataJoint](https://github.com/datajoint/mysql-docker) and run\n", + " the container according to the instructions provide in the repository.\n", + "- Create a fork of this repository to your GitHub account.\n", + "- Clone the repository and open the files using your IDE.\n", + "- Add a code cell immediately after the first code cell in the notebook - we will setup\n", + " the local connection using this cell. In this cell, type in the following code.\n", "\n", "```python\n", "import datajoint as dj\n", @@ -2733,13 +2744,13 @@ "dj.conn()\n", "```\n", "\n", - "+ Run the code block above and proceed with the rest of the notebook." + "- Run the code block above and proceed with the rest of the notebook.\n" ] } ], "metadata": { "kernelspec": { - "display_name": "python3p10", + "display_name": "element-dlc", "language": "python", "name": "python3" }, @@ -2753,14 +2764,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.17" + "version": "3.9.18" }, - "orig_nbformat": 4, - "vscode": { - "interpreter": { - "hash": "ff52d424e56dd643d8b2ec122f40a2e279e94970100b4e6430cb9025a65ba4cf" - } - } + "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 From ebfdba01eb741379c2cf9d8b200c2e69bf21c4e4 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Fri, 14 Feb 2025 15:49:35 +0000 Subject: [PATCH 172/204] black formatting --- element_array_ephys/plotting/corr.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/element_array_ephys/plotting/corr.py b/element_array_ephys/plotting/corr.py index a8daa8cb..d94a0d9f 100644 --- a/element_array_ephys/plotting/corr.py +++ b/element_array_ephys/plotting/corr.py @@ -1,5 +1,4 @@ -"""Code adapted from International Brain Laboratory, T. (2021). ibllib [Computer software]. https://github.com/int-brain-lab/ibllib -""" +"""Code adapted from International Brain Laboratory, T. (2021). ibllib [Computer software]. 
https://github.com/int-brain-lab/ibllib""" import numpy as np From a0df46be93cf005e39463639b545cf698643cb0e Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 4 Mar 2025 17:04:13 -0600 Subject: [PATCH 173/204] update: install neo from source until 0.14.1 release --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 0ff4bf08..c08341ab 100644 --- a/setup.py +++ b/setup.py @@ -34,6 +34,7 @@ "openpyxl", "plotly", "seaborn", + "neo @ git+https://github.com/NeuralEnsemble/python-neo.git", # install neo from source until 0.14.1 release "spikeinterface", "scikit-image", "nbformat>=4.2.0", From 2454b967f523dc3c285831f0648d26837526a406 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Milagros=20Mar=C3=ADn?= Date: Tue, 18 Mar 2025 14:25:02 +0000 Subject: [PATCH 174/204] fix: add key_source to `ProbeLevelReport` to filter for 'good' quality units --- element_array_ephys/ephys_report.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/element_array_ephys/ephys_report.py b/element_array_ephys/ephys_report.py index 0c6836a0..819772ba 100644 --- a/element_array_ephys/ephys_report.py +++ b/element_array_ephys/ephys_report.py @@ -50,6 +50,12 @@ class ProbeLevelReport(dj.Computed): --- drift_map_plot: attach """ + + @property + def key_source(self): + return ephys.CuratedClustering & ( + ephys.CuratedClustering.Unit & "cluster_quality_label='good'" + ) def make(self, key): from .plotting.probe_level import plot_driftmap From a1bc2cf432665746bc47b3cab3eb6ee730ef595f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Milagros=20Mar=C3=ADn?= Date: Tue, 18 Mar 2025 14:27:42 +0000 Subject: [PATCH 175/204] bump version and update changelog --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 628a7742..a1b24488 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,10 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) 
convention. +## [0.4.2] - 2025-03-18 + ++ Fix - Add key_source to `ProbeLevelReport` to filter for 'good' quality units + ## [0.4.1] - 2025-02-13 + Fix - Auto generate ProbeInsertion allows duplicate probes (e.g. probe reuse) From 4c01ac7adba4055307374b8f3af6be728dc9a911 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Milagros=20Mar=C3=ADn?= Date: Tue, 18 Mar 2025 14:25:02 +0000 Subject: [PATCH 176/204] fix: add key_source to `ProbeLevelReport` to filter for 'good' quality units From 80926c8eea251dfac4a3612681001ffccab440e6 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Tue, 25 Mar 2025 08:12:02 -0500 Subject: [PATCH 177/204] update: bump version --- CHANGELOG.md | 2 +- element_array_ephys/version.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a1b24488..0e3593d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,7 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. 
-## [0.4.2] - 2025-03-18 +## [0.4.2] - 2025-03-25 + Fix - Add key_source to `ProbeLevelReport` to filter for 'good' quality units diff --git a/element_array_ephys/version.py b/element_array_ephys/version.py index 0f62035b..ae202a66 100644 --- a/element_array_ephys/version.py +++ b/element_array_ephys/version.py @@ -1,3 +1,3 @@ """Package metadata.""" -__version__ = "0.4.1" +__version__ = "0.4.2" From 58cdfaaf144de2f23f3abebb3a8875fe2642d023 Mon Sep 17 00:00:00 2001 From: Thinh Nguyen Date: Fri, 9 May 2025 07:02:27 -0500 Subject: [PATCH 178/204] chore: minor code optimization --- element_array_ephys/spike_sorting/si_spike_sorting.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/element_array_ephys/spike_sorting/si_spike_sorting.py b/element_array_ephys/spike_sorting/si_spike_sorting.py index e3e797b6..a324afa3 100644 --- a/element_array_ephys/spike_sorting/si_spike_sorting.py +++ b/element_array_ephys/spike_sorting/si_spike_sorting.py @@ -275,7 +275,7 @@ def make(self, key): postprocessing_params = params["SI_POSTPROCESSING_PARAMS"] - job_kwargs = postprocessing_params.get( + job_kwargs = postprocessing_params.pop( "job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"} ) @@ -366,7 +366,7 @@ def make(self, key): postprocessing_params = params["SI_POSTPROCESSING_PARAMS"] - job_kwargs = postprocessing_params.get( + job_kwargs = postprocessing_params.pop( "job_kwargs", {"n_jobs": -1, "chunk_duration": "1s"} ) @@ -383,6 +383,8 @@ def _export_to_phy(): sorting_analyzer=sorting_analyzer, output_folder=analyzer_output_dir / "phy", use_relative_path=True, + remove_if_exists=True, + copy_binary=True, **job_kwargs, ) @@ -395,6 +397,7 @@ def _export_report(): si.exporters.export_report( sorting_analyzer=sorting_analyzer, output_folder=analyzer_output_dir / "spikeinterface_report", + remove_if_exists=True, **job_kwargs, ) From 234f38ee758ae71e8b521e02b87cec6f89b23778 Mon Sep 17 00:00:00 2001 From: MilagrosMarin Date: Tue, 20 May 2025 00:44:04 +0100 
Subject: [PATCH 179/204] fix(docs): update home URL from `datajoint.com/docs` to `docs.datajoint.com` --- .github/ISSUE_TEMPLATE/config.yml | 2 +- CONTRIBUTING.md | 2 +- README.md | 2 +- docs/mkdocs.yaml | 2 +- docs/src/.overrides/partials/nav.html | 7 +++---- docs/src/concepts.md | 4 ++-- docs/src/tutorials/index.md | 2 +- 7 files changed, 10 insertions(+), 11 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index d31fbace..74368549 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,5 +1,5 @@ blank_issues_enabled: false contact_links: - name: DataJoint Contribution Guideline - url: https://docs.datajoint.org/python/community/02-Contribute.html + url: https://docs.datajoint.com/about/contribute/ about: Please make sure to review the DataJoint Contribution Guidelines \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2bd0f498..61a342d6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,5 +1,5 @@ # Contribution Guidelines This project follows the -[DataJoint Contribution Guidelines](https://datajoint.com/docs/about/contribute/). +[DataJoint Contribution Guidelines](https://docs.datajoint.com/about/contribute/). Please reference the link for more full details. diff --git a/README.md b/README.md index 85277136..aad51cad 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ environment and notebooks to learn the pipeline. 
+ [Interactive tutorial on GitHub Codespaces](https://github.com/datajoint/element-array-ephys#interactive-tutorial) -+ [Documentation](https://datajoint.com/docs/elements/element-array-ephys) ++ [Documentation](https://docs.datajoint.com/elements/element-array-ephys/) ## Support diff --git a/docs/mkdocs.yaml b/docs/mkdocs.yaml index e211069a..2fec112c 100644 --- a/docs/mkdocs.yaml +++ b/docs/mkdocs.yaml @@ -1,7 +1,7 @@ --- # ---------------------- PROJECT SPECIFIC --------------------------- site_name: DataJoint Documentation -site_url: http://localhost/docs/elements/element-array-ephys +site_url: https://docs.datajoint.com/elements/element-array-ephys/ repo_url: https://github.com/datajoint/element-array-ephys repo_name: datajoint/element-array-ephys nav: diff --git a/docs/src/.overrides/partials/nav.html b/docs/src/.overrides/partials/nav.html index cd6326b6..faac796a 100644 --- a/docs/src/.overrides/partials/nav.html +++ b/docs/src/.overrides/partials/nav.html @@ -7,15 +7,14 @@ {% endif %}