From 90b111339c3cf8629e38600844d4e51e219880d1 Mon Sep 17 00:00:00 2001 From: MattAitken Date: Wed, 30 Nov 2022 16:32:10 -0600 Subject: [PATCH 1/5] adds StimulusType and STIMULUS_TYPE_NAME_MAPPING, updates detection_parameters based on StimulusType --- .../gallery/analysis_examples/lsq_analysis.py | 2 +- .../analysis_examples/ramp_analysis.py | 2 +- .../short_square_analysis.py | 2 +- docs/stimuli.rst | 8 +- docs/tutorial.rst | 4 +- ipfx/bin/run_feature_collection.py | 6 +- ipfx/bin/run_feature_vector_extraction.py | 6 +- ipfx/data_set_features.py | 54 ++++++++----- ipfx/dataset/ephys_data_set.py | 6 -- ipfx/plot_qc_figures.py | 2 +- ipfx/qc_feature_evaluator.py | 2 +- ipfx/qc_feature_extractor.py | 10 +-- ipfx/script_utils.py | 7 +- ipfx/stimulus.py | 79 +++++++++++++------ .../pipeline_output.json | 4 +- .../pipeline_output.json | 4 +- .../pipeline_output.json | 4 +- 17 files changed, 122 insertions(+), 80 deletions(-) diff --git a/docs/gallery/analysis_examples/lsq_analysis.py b/docs/gallery/analysis_examples/lsq_analysis.py index 63f8b5cf..bd0634fd 100644 --- a/docs/gallery/analysis_examples/lsq_analysis.py +++ b/docs/gallery/analysis_examples/lsq_analysis.py @@ -32,7 +32,7 @@ # get sweep table of Long Square sweeps long_square_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.long_square_names + stimuli=data_set.ontology.LONG_SQUARE_NAMES ) long_square_sweeps = data_set.sweep_set(long_square_table.sweep_number) diff --git a/docs/gallery/analysis_examples/ramp_analysis.py b/docs/gallery/analysis_examples/ramp_analysis.py index af30480b..b2cb38fc 100644 --- a/docs/gallery/analysis_examples/ramp_analysis.py +++ b/docs/gallery/analysis_examples/ramp_analysis.py @@ -33,7 +33,7 @@ # get sweep table of Ramp sweeps ramp_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.ramp_names + stimuli=data_set.ontology.RAMP_NAMES ) ramp_sweeps = data_set.sweep_set(ramp_table.sweep_number) diff --git 
a/docs/gallery/analysis_examples/short_square_analysis.py b/docs/gallery/analysis_examples/short_square_analysis.py index 48a0208e..b3dac32d 100644 --- a/docs/gallery/analysis_examples/short_square_analysis.py +++ b/docs/gallery/analysis_examples/short_square_analysis.py @@ -31,7 +31,7 @@ drop_failed_sweeps(data_set) short_square_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.short_square_names + stimuli=data_set.ontology.SHORT_SQUARE_NAMES ) short_square_sweeps = data_set.sweep_set(short_square_table.sweep_number) diff --git a/docs/stimuli.rst b/docs/stimuli.rst index f87e1945..abc7df79 100644 --- a/docs/stimuli.rst +++ b/docs/stimuli.rst @@ -42,21 +42,21 @@ For example, Short Square stimuli are identified by the following name tags: .. code-block:: python - self.short_square_names = ( "Short Square", + self.SHORT_SQUARE_NAMES = ( "Short Square", "Short Square Threshold", "Short Square - Hold -60mV", "Short Square - Hold -70mV", "Short Square - Hold -80mV" ) that allows mapping the sweep with the stimulus code "``C1SSCOARSE150112``" to -the Short Square stimuli 'self.short_square_names'. +the Short Square stimuli 'self.SHORT_SQUARE_NAMES'. With the ontology defined, you can now filter :py:class:`~ipfx.dataset.ephys_data_set.EphysDataSet` sweeps by the stimulus type: .. code-block:: python short_square_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.long_square_names + stimuli=data_set.ontology.LONG_SQUARE_NAMES ) -that returns a table of metadata for the sweeps matching the ``self.short_square_names`` tags. +that returns a table of metadata for the sweeps matching the ``self.SHORT_SQUARE_NAMES`` tags. 
diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 573c2722..58cb4d88 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -99,7 +99,7 @@ To create an instance of the :py:class:`~ipfx.dataset.ephys_data_set.EphysDataSe from ipfx.dataset.create import create_ephys_data_set dataset = create_ephys_data_set(nwb_file="path/to/experiment.nwb") - long_squares = dataset.filtered_sweep_table(stimuli=ds.ontology.long_square_names) # more on this next! + long_squares = dataset.filtered_sweep_table(stimuli=ds.ontology.LONG_SQUARE_NAMES) # more on this next! sweep_set = dataset.sweep_set(long_squares.sweep_number) where ``path/to/experiment.nwb`` is a local path to the nwb2 file that you have downloaded from the public archive. @@ -121,7 +121,7 @@ by filtering the ``sweep_table``: .. code-block:: python - long_squares = dataset.filtered_sweep_table(stimuli=dataset.ontology.long_square_names) # more on this next! + long_squares = dataset.filtered_sweep_table(stimuli=dataset.ontology.LONG_SQUARE_NAMES) # more on this next! sweep_set = dataset.sweep_set(long_squares.sweep_number) where ``dataset.ontology`` includes references to the names of all stimuli types known to ``IPFX``. 
diff --git a/ipfx/bin/run_feature_collection.py b/ipfx/bin/run_feature_collection.py index a68ca2b9..b64eead0 100644 --- a/ipfx/bin/run_feature_collection.py +++ b/ipfx/bin/run_feature_collection.py @@ -33,9 +33,9 @@ def data_for_specimen_id(specimen_id, passed_only, data_source, ontology, file_l return {} try: - lsq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.long_square_names) - ssq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.short_square_names) - ramp_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.ramp_names) + lsq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.LONG_SQUARE_NAMES) + ssq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.SHORT_SQUARE_NAMES) + ramp_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.RAMP_NAMES) except Exception as detail: logging.warn("Exception when processing specimen {:d}".format(specimen_id)) logging.warn(detail) diff --git a/ipfx/bin/run_feature_vector_extraction.py b/ipfx/bin/run_feature_vector_extraction.py index 306ba193..f2e41441 100644 --- a/ipfx/bin/run_feature_vector_extraction.py +++ b/ipfx/bin/run_feature_vector_extraction.py @@ -112,7 +112,7 @@ def data_for_specimen_id( # Identify and preprocess long square sweeps try: lsq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, - ontology.long_square_names, sweep_qc_option=sweep_qc_option, + ontology.LONG_SQUARE_NAMES, sweep_qc_option=sweep_qc_option, specimen_id=specimen_id) (lsq_sweeps, lsq_features, @@ -128,7 +128,7 @@ def data_for_specimen_id( # Identify and preprocess short square sweeps try: ssq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, - ontology.short_square_names, sweep_qc_option=sweep_qc_option, + ontology.SHORT_SQUARE_NAMES, sweep_qc_option=sweep_qc_option, specimen_id=specimen_id) ssq_sweeps, ssq_features, _ = su.preprocess_short_square_sweeps(data_set, ssq_sweep_numbers) @@ -140,7 +140,7 @@ def data_for_specimen_id( # Identify and preprocess ramp sweeps 
try: ramp_sweep_numbers = su.categorize_iclamp_sweeps(data_set, - ontology.ramp_names, sweep_qc_option=sweep_qc_option, + ontology.RAMP_NAMES, sweep_qc_option=sweep_qc_option, specimen_id=specimen_id) ramp_sweeps, ramp_features, _ = su.preprocess_ramp_sweeps(data_set, ramp_sweep_numbers) diff --git a/ipfx/data_set_features.py b/ipfx/data_set_features.py index b49fdc6b..e7a9df8a 100644 --- a/ipfx/data_set_features.py +++ b/ipfx/data_set_features.py @@ -35,9 +35,10 @@ # import functools import numpy as np +from collections import defaultdict import logging from .feature_extractor import SpikeFeatureExtractor,SpikeTrainFeatureExtractor -from ipfx.dataset.ephys_data_set import EphysDataSet +from ipfx.stimulus import StimulusOntology, StimulusType, get_stimulus_type from . import spike_features as spkf from . import stimulus_protocol_analysis as spa from . import stim_features as stf @@ -45,16 +46,17 @@ from . import error as er from . import logging_utils as lu -DEFAULT_DETECTION_PARAMETERS = { 'dv_cutoff': 20.0, 'thresh_frac': 0.05 } - -DETECTION_PARAMETERS = { - EphysDataSet.SHORT_SQUARE: {'thresh_frac_floor': 0.1 }, - EphysDataSet.RAMP: { }, - EphysDataSet.LONG_SQUARE: { } -} - - +DETECTION_PARAMETERS = defaultdict( + lambda: {}, + { + # To override detection parameters for specific StimulusType, add it here. + # If not explicitly listed, default detection parameters will be used (see extractors_for_sweeps()). 
+ # See ipfx.stimulus for stimulus types + StimulusType.SHORT_SQUARE: {'thresh_frac_floor': 0.1 }, + StimulusType.CHIRP: {"filter_frequency": None} + } +) SUBTHRESHOLD_LONG_SQUARE_MIN_AMPS = { @@ -65,8 +67,18 @@ TEST_PULSE_DURATION_SEC = 0.4 -def detection_parameters(stimulus_name): - return DETECTION_PARAMETERS.get(stimulus_name, {}) +def detection_parameters(stimulus_type): + return DETECTION_PARAMETERS[stimulus_type] + + +def detection_parameters_from_stimulus_name(stimulus_name): + try: + stimulus_type = get_stimulus_type(stimulus_name) + dp = detection_parameters(stimulus_type) + except ValueError as e: + logging.warning(f"Warning: {e}\nUsing default detection parameters") + dp = detection_parameters(None) + return dp def record_errors(fn): @@ -102,6 +114,7 @@ def extractors_for_sweeps(sweep_set, dv_cutoff=20., thresh_frac=0.05, reject_at_stim_start_interval=0, min_peak=-30, + filter_frequency=10., thresh_frac_floor=None, est_window=None, start=None, end=None): @@ -139,6 +152,7 @@ def extractors_for_sweeps(sweep_set, start=start, end=end, min_peak=min_peak, + filter=filter_frequency, reject_at_stim_start_interval=reject_at_stim_start_interval) stfx = SpikeTrainFeatureExtractor(start, end) @@ -162,7 +176,7 @@ def extract_sweep_features(data_set, sweep_table): sweep_set = data_set.sweep_set(sweep_numbers) sweep_set.align_to_start_of_epoch("experiment") - dp = detection_parameters(stimulus_name).copy() + dp = detection_parameters_from_stimulus_name(stimulus_name).copy() for k in [ "start", "end" ]: if k in dp: dp.pop(k) @@ -219,7 +233,7 @@ def extract_cell_long_square_features(data_set, subthresh_min_amp=None): lu.log_pretty_header("Long Squares:", level=2) long_square_sweep_numbers = data_set.get_sweep_numbers( - data_set.ontology.long_square_names, + data_set.ontology.LONG_SQUARE_NAMES, clamp_mode=data_set.CURRENT_CLAMP) if len(long_square_sweep_numbers) == 0: raise er.FeatureError("No long_square sweeps available for feature extraction") @@ -227,7 +241,7 @@ 
def extract_cell_long_square_features(data_set, subthresh_min_amp=None): if subthresh_min_amp is None: clsq_sweeps = data_set.filtered_sweep_table( clamp_mode=data_set.CURRENT_CLAMP, - stimuli=data_set.ontology.coarse_long_square_names) + stimuli=data_set.ontology.COARSE_LONG_SQUARE_NAMES) clsq_sweep_numbers = clsq_sweeps['sweep_number'].sort_values().values if len(clsq_sweep_numbers) > 0: subthresh_min_amp, clsq_amp_delta = select_subthreshold_min_amplitude(clsq_sweeps['stimulus_amplitude']) @@ -246,7 +260,7 @@ def extract_cell_long_square_features(data_set, subthresh_min_amp=None): lsq_spx, lsq_spfx = extractors_for_sweeps(lsq_sweeps, start=lsq_start, end=lsq_start+lsq_dur, - **detection_parameters(data_set.LONG_SQUARE)) + **detection_parameters(StimulusType.LONG_SQUARE)) lsq_an = spa.LongSquareAnalysis(lsq_spx, lsq_spfx, subthresh_min_amp=subthresh_min_amp) @@ -270,7 +284,7 @@ def extract_cell_short_square_features(data_set): lu.log_pretty_header("Short Squares:", level=2) short_square_sweep_numbers = data_set.get_sweep_numbers( - data_set.ontology.short_square_names, + data_set.ontology.SHORT_SQUARE_NAMES, clamp_mode=data_set.CURRENT_CLAMP) if len(short_square_sweep_numbers) == 0: raise er.FeatureError("No short square sweeps available for feature extraction") @@ -285,7 +299,7 @@ def extract_cell_short_square_features(data_set): SSQ_WINDOW = 0.001 ssq_spx, ssq_spfx = extractors_for_sweeps(ssq_sweeps, est_window=[ssq_start, ssq_start+SSQ_WINDOW], - **detection_parameters(data_set.SHORT_SQUARE)) + **detection_parameters(StimulusType.SHORT_SQUARE)) ssq_an = spa.ShortSquareAnalysis(ssq_spx, ssq_spfx) @@ -303,7 +317,7 @@ def extract_cell_ramp_features(data_set): lu.log_pretty_header("Ramps:", level=2) ramp_sweep_numbers = data_set.get_sweep_numbers( - data_set.ontology.ramp_names, + data_set.ontology.RAMP_NAMES, clamp_mode=data_set.CURRENT_CLAMP) if len(ramp_sweep_numbers) == 0: raise er.FeatureError("No ramp sweeps available for feature extraction") @@ -315,7 
+329,7 @@ def extract_cell_ramp_features(data_set): ramp_spx, ramp_spfx = extractors_for_sweeps(ramp_sweeps, start=ramp_start, - **detection_parameters(data_set.RAMP)) + **detection_parameters(StimulusType.RAMP)) ramp_an = spa.RampAnalysis(ramp_spx, ramp_spfx) ramp_features = ramp_an.analyze(ramp_sweeps) return ramp_an.as_dict( diff --git a/ipfx/dataset/ephys_data_set.py b/ipfx/dataset/ephys_data_set.py index 345bb68d..953ef259 100644 --- a/ipfx/dataset/ephys_data_set.py +++ b/ipfx/dataset/ephys_data_set.py @@ -32,12 +32,6 @@ class EphysDataSet(object): SWEEP_NUMBER, ] - LONG_SQUARE = 'long_square' - COARSE_LONG_SQUARE = 'coarse_long_square' - SHORT_SQUARE_TRIPLE = 'short_square_triple' - SHORT_SQUARE = 'short_square' - RAMP = 'ramp' - VOLTAGE_CLAMP = "VoltageClamp" CURRENT_CLAMP = "CurrentClamp" diff --git a/ipfx/plot_qc_figures.py b/ipfx/plot_qc_figures.py index da361bd1..83baa691 100644 --- a/ipfx/plot_qc_figures.py +++ b/ipfx/plot_qc_figures.py @@ -448,7 +448,7 @@ def plot_instantaneous_threshold_thumbnail(data_set, sweep_numbers, cell_feature def plot_ramp_figures(data_set, cell_features, lims_features, sweep_features, image_dir, sizes, cell_image_files): ramps_sweeps = data_set.filtered_sweep_table(clamp_mode=data_set.CURRENT_CLAMP, - stimuli=data_set.ontology.ramp_names) + stimuli=data_set.ontology.RAMP_NAMES) ramps_sweeps = np.sort(ramps_sweeps['sweep_number'].values) figs = [] diff --git a/ipfx/qc_feature_evaluator.py b/ipfx/qc_feature_evaluator.py index 3d164634..bb3ac830 100644 --- a/ipfx/qc_feature_evaluator.py +++ b/ipfx/qc_feature_evaluator.py @@ -66,7 +66,7 @@ def qc_sweeps(ontology, sweep_features, qc_criteria): for sweep in sweep_features: sweep_num = sweep["sweep_number"] - is_ramp = ontology.stimulus_has_any_tags(sweep["stimulus_code"], ontology.ramp_names) + is_ramp = ontology.stimulus_has_any_tags(sweep["stimulus_code"], ontology.RAMP_NAMES) fail_tags = qc_current_clamp_sweep(sweep, is_ramp, qc_criteria) sweep_state = 
sp.create_sweep_state(sweep_num, fail_tags) sweep_states.append(sweep_state) diff --git a/ipfx/qc_feature_extractor.py b/ipfx/qc_feature_extractor.py index 05868a34..8579881f 100644 --- a/ipfx/qc_feature_extractor.py +++ b/ipfx/qc_feature_extractor.py @@ -25,7 +25,7 @@ def extract_blowout(data_set, tags): ontology = data_set.ontology try: - blowout_sweep_number = data_set.get_sweep_numbers(ontology.blowout_names)[-1] + blowout_sweep_number = data_set.get_sweep_numbers(ontology.BLOWOUT_NAMES)[-1] blowout_data = data_set.sweep(blowout_sweep_number) _,test_end_idx = blowout_data.epochs["test"] blowout_mv = qcf.measure_blowout(blowout_data.v, test_end_idx) @@ -54,7 +54,7 @@ def extract_electrode_0(data_set, tags): ontology = data_set.ontology try: - bath_sweep_number = data_set.get_sweep_numbers(ontology.bath_names)[-1] + bath_sweep_number = data_set.get_sweep_numbers(ontology.BATH_NAMES)[-1] bath_data = data_set.sweep(bath_sweep_number) e0 = qcf.measure_electrode_0(bath_data.i, bath_data.sampling_rate) @@ -96,7 +96,7 @@ def extract_clamp_seal(data_set, tags, manual_values=None): ontology = data_set.ontology try: - seal_sweep_number = data_set.get_sweep_numbers(ontology.seal_names,"VoltageClamp")[-1] + seal_sweep_number = data_set.get_sweep_numbers(ontology.SEAL_NAMES,"VoltageClamp")[-1] seal_data = data_set.sweep(seal_sweep_number) seal_gohm = qcf.measure_seal(seal_data.v, @@ -148,7 +148,7 @@ def extract_input_and_access_resistance(data_set, tags, manual_values=None): try: - breakin_sweep_number = data_set.get_sweep_numbers(ontology.breakin_names, "VoltageClamp")[-1] + breakin_sweep_number = data_set.get_sweep_numbers(ontology.BREAKIN_NAMES, "VoltageClamp")[-1] breakin_data = data_set.sweep(breakin_sweep_number) except IndexError as e: tags.append("Breakin sweep not found") @@ -276,7 +276,7 @@ def sweep_qc_features(data_set): sweep_num = sweep_info['sweep_number'] sweep = data_set.sweep(sweep_num) - is_ramp = sweep_info['stimulus_name'] in ontology.ramp_names + 
is_ramp = sweep_info['stimulus_name'] in ontology.RAMP_NAMES tags = check_sweep_integrity(sweep, is_ramp) sweep_features["tags"] = tags diff --git a/ipfx/script_utils.py b/ipfx/script_utils.py index a93cfced..f7ec36e5 100644 --- a/ipfx/script_utils.py +++ b/ipfx/script_utils.py @@ -15,6 +15,7 @@ import ipfx.data_set_features as dsf import ipfx.time_series_utils as tsu import ipfx.error as er +from ipfx.stimulus import StimulusType from ipfx.sweep import SweepSet from ipfx.dataset.create import create_ephys_data_set @@ -221,7 +222,7 @@ def preprocess_long_square_sweeps(data_set, sweep_numbers, extra_dur=0.2, subthr start=lsq_start, end=lsq_end, min_peak=-25, - **dsf.detection_parameters(data_set.LONG_SQUARE) + **dsf.detection_parameters(StimulusType.LONG_SQUARE) ) lsq_an = spa.LongSquareAnalysis(lsq_spx, lsq_spfx, subthresh_min_amp=subthresh_min_amp) @@ -243,7 +244,7 @@ def preprocess_short_square_sweeps(data_set, sweep_numbers, extra_dur=0.2, spike start=ssq_start, end=ssq_end + spike_window, reject_at_stim_start_interval=0.0002, - **dsf.detection_parameters(data_set.SHORT_SQUARE)) + **dsf.detection_parameters(StimulusType.SHORT_SQUARE)) ssq_an = spa.ShortSquareAnalysis(ssq_spx, ssq_spfx) ssq_features = ssq_an.analyze(ssq_sweeps) @@ -259,7 +260,7 @@ def preprocess_ramp_sweeps(data_set, sweep_numbers): ramp_start, ramp_dur, _, _, _ = stf.get_stim_characteristics(ramp_sweeps.sweeps[0].i, ramp_sweeps.sweeps[0].t) ramp_spx, ramp_spfx = dsf.extractors_for_sweeps(ramp_sweeps, start = ramp_start, - **dsf.detection_parameters(data_set.RAMP)) + **dsf.detection_parameters(StimulusType.RAMP)) ramp_an = spa.RampAnalysis(ramp_spx, ramp_spfx) ramp_features = ramp_an.analyze(ramp_sweeps) diff --git a/ipfx/stimulus.py b/ipfx/stimulus.py index ae264f30..89c54170 100644 --- a/ipfx/stimulus.py +++ b/ipfx/stimulus.py @@ -4,6 +4,58 @@ import allensdk.core.json_utilities as ju +from enum import Enum + +_STIMULUS_TYPE_NAME_MAPPING = { + # Maps stimulus type to set of names + "ramp": 
{"Ramp"}, + "long_square": { + "Long Square", + "Long Square Threshold", + "Long Square SupraThreshold", + "Long Square SubThreshold", + }, + "coarse_long_square": { + "C1LSCOARSE", + }, + "short_square_triple": { + "Short Square - Triple", + }, + "short_square": { + "Short Square", + "Short Square Threshold", + "Short Square - Hold -60mV", + "Short Square - Hold -70mV", + "Short Square - Hold -80mV", + }, + "chirp": { + "Chirp", + "ChirpA", + "ChirpB", + "ChirpC", + "ChirpD", + }, + "search": {"Search"}, + "test": {"Test"}, + "blowout": {"EXTPBLWOUT"}, + "bath": {"EXTPINBATH"}, + "seal": {"EXTPCllATT"}, + "breakin": {"EXTPBREAKN"}, + "extp": {"EXTP"} +} + +StimulusType = Enum("StimulusType", [(k.upper(), k) for k in _STIMULUS_TYPE_NAME_MAPPING.keys()]) +# e.g. ipfx.stimulus.StimulusType.detection_parameters(StimulusType. => + +STIMULUS_TYPE_NAME_MAPPING = {StimulusType(stim_type): stim_names for stim_type, stim_names in _STIMULUS_TYPE_NAME_MAPPING.items()} + + +def get_stimulus_type(stimulus_name): + for stim_type, stim_names in STIMULUS_TYPE_NAME_MAPPING.items(): + if stimulus_name in stim_names: + return stim_type + else: + raise ValueError(f"stimulus_name {stimulus_name} not found.\nSTIMULUS_TYPE_NAME_MAPPING: {STIMULUS_TYPE_NAME_MAPPING}") class Stimulus(object): @@ -45,29 +97,10 @@ def __init__(self, stim_ontology_tags=None): self.stimuli = list(Stimulus(s) for s in stim_ontology_tags) - self.ramp_names = ( "Ramp",) - - self.long_square_names = ( "Long Square", - "Long Square Threshold", - "Long Square SupraThreshold", - "Long Square SubThreshold" ) - - self.coarse_long_square_names = ( "C1LSCOARSE",) - self.short_square_triple_names = ( "Short Square - Triple", ) - - self.short_square_names = ( "Short Square", - "Short Square Threshold", - "Short Square - Hold -60mV", - "Short Square - Hold -70mV", - "Short Square - Hold -80mV" ) - - self.search_names = ("Search",) - self.test_names = ("Test",) - self.blowout_names = ( 'EXTPBLWOUT', ) - self.bath_names = ( 
'EXTPINBATH', ) - self.seal_names = ( 'EXTPCllATT', ) - self.breakin_names = ( 'EXTPBREAKN', ) - self.extp_names = ( 'EXTP', ) + for stimulus_type, names in _STIMULUS_TYPE_NAME_MAPPING.items(): + # Sets helper attributes like StimulusOntology.RAMP_NAMES + setattr(self, f"{stimulus_type.upper()}_NAMES", names) + def find(self, tag, tag_type=None): """ diff --git a/tests/data/specimens/Ctgf-T2A-dgCre;Ai14-495723.05.02.01/pipeline_output.json b/tests/data/specimens/Ctgf-T2A-dgCre;Ai14-495723.05.02.01/pipeline_output.json index c0c55bc2..0c2df9cc 100644 --- a/tests/data/specimens/Ctgf-T2A-dgCre;Ai14-495723.05.02.01/pipeline_output.json +++ b/tests/data/specimens/Ctgf-T2A-dgCre;Ai14-495723.05.02.01/pipeline_output.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9d5b2334ada0dda7979d8d51802d4f6e9abbb604b658cbf126434cd44f538d0b -size 379538 +oid sha256:368bcc976f11437ccea47c7533b3b0669ce7471d3c7a0f51f1b051d74f9c6524 +size 380671 diff --git a/tests/data/specimens/Pvalb-IRES-Cre;Ai14(IVSCC)-165172.05.02/pipeline_output.json b/tests/data/specimens/Pvalb-IRES-Cre;Ai14(IVSCC)-165172.05.02/pipeline_output.json index e7ccaa62..3b5a965e 100644 --- a/tests/data/specimens/Pvalb-IRES-Cre;Ai14(IVSCC)-165172.05.02/pipeline_output.json +++ b/tests/data/specimens/Pvalb-IRES-Cre;Ai14(IVSCC)-165172.05.02/pipeline_output.json @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:eaa60680677db222fdbf7d6c566740c4070a2be2fc752fa61b02e9d754f21e4c -size 1652539 +oid sha256:99982b633670a238baa4758c9026fb2091321800c473e1f3590ce3d108ff15b6 +size 1652494 diff --git a/tests/data/specimens/Vip-IRES-Cre;Ai14-331294.04.01.01/pipeline_output.json b/tests/data/specimens/Vip-IRES-Cre;Ai14-331294.04.01.01/pipeline_output.json index 274b8c9d..2fceb01d 100644 --- a/tests/data/specimens/Vip-IRES-Cre;Ai14-331294.04.01.01/pipeline_output.json +++ b/tests/data/specimens/Vip-IRES-Cre;Ai14-331294.04.01.01/pipeline_output.json @@ -1,3 +1,3 @@ version 
https://git-lfs.github.com/spec/v1 -oid sha256:c8c50a031da78bf365ff8349cfe6d09bf292115c9a3fc6fe2a7a7829940a6e82 -size 440266 +oid sha256:022ab74919eff869aa9893c241d736909ef4cdda762fd9e7feb12c729fc630fa +size 441882 From a458a248b383929f4f861024c9d552e397052c54 Mon Sep 17 00:00:00 2001 From: MattAitken Date: Thu, 1 Dec 2022 16:06:44 -0600 Subject: [PATCH 2/5] updating Chirp names --- ipfx/stimulus.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ipfx/stimulus.py b/ipfx/stimulus.py index 89c54170..2577c8b8 100644 --- a/ipfx/stimulus.py +++ b/ipfx/stimulus.py @@ -30,10 +30,10 @@ }, "chirp": { "Chirp", - "ChirpA", - "ChirpB", - "ChirpC", - "ChirpD", + "Chirp A Threshold", + "Chirp B - Hold -65mV", + "Chirp C - Hold -60mV", + "Chirp D - Hold -55mV", }, "search": {"Search"}, "test": {"Test"}, From 086d36e0ab22cbfd93952ebb6c8b5f1256897d13 Mon Sep 17 00:00:00 2001 From: MattAitken Date: Sun, 4 Dec 2022 20:12:49 -0600 Subject: [PATCH 3/5] roll back some breaking changes --- .../gallery/analysis_examples/lsq_analysis.py | 2 +- .../analysis_examples/ramp_analysis.py | 2 +- .../short_square_analysis.py | 2 +- docs/stimuli.rst | 8 +-- docs/tutorial.rst | 4 +- ipfx/bin/run_feature_collection.py | 6 +- ipfx/bin/run_feature_vector_extraction.py | 6 +- ipfx/data_set_features.py | 8 +-- ipfx/defaults/stimulus_ontology.json | 15 ++++ ipfx/plot_qc_figures.py | 2 +- ipfx/qc_feature_evaluator.py | 2 +- ipfx/qc_feature_extractor.py | 10 +-- ipfx/stimulus.py | 69 +++++++++++++------ 13 files changed, 88 insertions(+), 48 deletions(-) diff --git a/docs/gallery/analysis_examples/lsq_analysis.py b/docs/gallery/analysis_examples/lsq_analysis.py index bd0634fd..63f8b5cf 100644 --- a/docs/gallery/analysis_examples/lsq_analysis.py +++ b/docs/gallery/analysis_examples/lsq_analysis.py @@ -32,7 +32,7 @@ # get sweep table of Long Square sweeps long_square_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.LONG_SQUARE_NAMES + 
stimuli=data_set.ontology.long_square_names ) long_square_sweeps = data_set.sweep_set(long_square_table.sweep_number) diff --git a/docs/gallery/analysis_examples/ramp_analysis.py b/docs/gallery/analysis_examples/ramp_analysis.py index b2cb38fc..af30480b 100644 --- a/docs/gallery/analysis_examples/ramp_analysis.py +++ b/docs/gallery/analysis_examples/ramp_analysis.py @@ -33,7 +33,7 @@ # get sweep table of Ramp sweeps ramp_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.RAMP_NAMES + stimuli=data_set.ontology.ramp_names ) ramp_sweeps = data_set.sweep_set(ramp_table.sweep_number) diff --git a/docs/gallery/analysis_examples/short_square_analysis.py b/docs/gallery/analysis_examples/short_square_analysis.py index b3dac32d..48a0208e 100644 --- a/docs/gallery/analysis_examples/short_square_analysis.py +++ b/docs/gallery/analysis_examples/short_square_analysis.py @@ -31,7 +31,7 @@ drop_failed_sweeps(data_set) short_square_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.SHORT_SQUARE_NAMES + stimuli=data_set.ontology.short_square_names ) short_square_sweeps = data_set.sweep_set(short_square_table.sweep_number) diff --git a/docs/stimuli.rst b/docs/stimuli.rst index abc7df79..f87e1945 100644 --- a/docs/stimuli.rst +++ b/docs/stimuli.rst @@ -42,21 +42,21 @@ For example, Short Square stimuli are identified by the following name tags: .. code-block:: python - self.SHORT_SQUARE_NAMES = ( "Short Square", + self.short_square_names = ( "Short Square", "Short Square Threshold", "Short Square - Hold -60mV", "Short Square - Hold -70mV", "Short Square - Hold -80mV" ) that allows mapping the sweep with the stimulus code "``C1SSCOARSE150112``" to -the Short Square stimuli 'self.SHORT_SQUARE_NAMES'. +the Short Square stimuli 'self.short_square_names'. With the ontology defined, you can now filter :py:class:`~ipfx.dataset.ephys_data_set.EphysDataSet` sweeps by the stimulus type: .. 
code-block:: python short_square_table = data_set.filtered_sweep_table( - stimuli=data_set.ontology.LONG_SQUARE_NAMES + stimuli=data_set.ontology.long_square_names ) -that returns a table of metadata for the sweeps matching the ``self.SHORT_SQUARE_NAMES`` tags. +that returns a table of metadata for the sweeps matching the ``self.short_square_names`` tags. diff --git a/docs/tutorial.rst b/docs/tutorial.rst index 58cb4d88..573c2722 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -99,7 +99,7 @@ To create an instance of the :py:class:`~ipfx.dataset.ephys_data_set.EphysDataSe from ipfx.dataset.create import create_ephys_data_set dataset = create_ephys_data_set(nwb_file="path/to/experiment.nwb") - long_squares = dataset.filtered_sweep_table(stimuli=ds.ontology.LONG_SQUARE_NAMES) # more on this next! + long_squares = dataset.filtered_sweep_table(stimuli=ds.ontology.long_square_names) # more on this next! sweep_set = dataset.sweep_set(long_squares.sweep_number) where ``path/to/experiment.nwb`` is a local path to the nwb2 file that you have downloaded from the public archive. @@ -121,7 +121,7 @@ by filtering the ``sweep_table``: .. code-block:: python - long_squares = dataset.filtered_sweep_table(stimuli=dataset.ontology.LONG_SQUARE_NAMES) # more on this next! + long_squares = dataset.filtered_sweep_table(stimuli=dataset.ontology.long_square_names) # more on this next! sweep_set = dataset.sweep_set(long_squares.sweep_number) where ``dataset.ontology`` includes references to the names of all stimuli types known to ``IPFX``. 
diff --git a/ipfx/bin/run_feature_collection.py b/ipfx/bin/run_feature_collection.py index b64eead0..a68ca2b9 100644 --- a/ipfx/bin/run_feature_collection.py +++ b/ipfx/bin/run_feature_collection.py @@ -33,9 +33,9 @@ def data_for_specimen_id(specimen_id, passed_only, data_source, ontology, file_l return {} try: - lsq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.LONG_SQUARE_NAMES) - ssq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.SHORT_SQUARE_NAMES) - ramp_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.RAMP_NAMES) + lsq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.long_square_names) + ssq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.short_square_names) + ramp_sweep_numbers = su.categorize_iclamp_sweeps(data_set, ontology.ramp_names) except Exception as detail: logging.warn("Exception when processing specimen {:d}".format(specimen_id)) logging.warn(detail) diff --git a/ipfx/bin/run_feature_vector_extraction.py b/ipfx/bin/run_feature_vector_extraction.py index f2e41441..306ba193 100644 --- a/ipfx/bin/run_feature_vector_extraction.py +++ b/ipfx/bin/run_feature_vector_extraction.py @@ -112,7 +112,7 @@ def data_for_specimen_id( # Identify and preprocess long square sweeps try: lsq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, - ontology.LONG_SQUARE_NAMES, sweep_qc_option=sweep_qc_option, + ontology.long_square_names, sweep_qc_option=sweep_qc_option, specimen_id=specimen_id) (lsq_sweeps, lsq_features, @@ -128,7 +128,7 @@ def data_for_specimen_id( # Identify and preprocess short square sweeps try: ssq_sweep_numbers = su.categorize_iclamp_sweeps(data_set, - ontology.SHORT_SQUARE_NAMES, sweep_qc_option=sweep_qc_option, + ontology.short_square_names, sweep_qc_option=sweep_qc_option, specimen_id=specimen_id) ssq_sweeps, ssq_features, _ = su.preprocess_short_square_sweeps(data_set, ssq_sweep_numbers) @@ -140,7 +140,7 @@ def data_for_specimen_id( # Identify and preprocess ramp sweeps 
try: ramp_sweep_numbers = su.categorize_iclamp_sweeps(data_set, - ontology.RAMP_NAMES, sweep_qc_option=sweep_qc_option, + ontology.ramp_names, sweep_qc_option=sweep_qc_option, specimen_id=specimen_id) ramp_sweeps, ramp_features, _ = su.preprocess_ramp_sweeps(data_set, ramp_sweep_numbers) diff --git a/ipfx/data_set_features.py b/ipfx/data_set_features.py index e7a9df8a..034bf9ef 100644 --- a/ipfx/data_set_features.py +++ b/ipfx/data_set_features.py @@ -233,7 +233,7 @@ def extract_cell_long_square_features(data_set, subthresh_min_amp=None): lu.log_pretty_header("Long Squares:", level=2) long_square_sweep_numbers = data_set.get_sweep_numbers( - data_set.ontology.LONG_SQUARE_NAMES, + data_set.ontology.long_square_names, clamp_mode=data_set.CURRENT_CLAMP) if len(long_square_sweep_numbers) == 0: raise er.FeatureError("No long_square sweeps available for feature extraction") @@ -241,7 +241,7 @@ def extract_cell_long_square_features(data_set, subthresh_min_amp=None): if subthresh_min_amp is None: clsq_sweeps = data_set.filtered_sweep_table( clamp_mode=data_set.CURRENT_CLAMP, - stimuli=data_set.ontology.COARSE_LONG_SQUARE_NAMES) + stimuli=data_set.ontology.coarse_long_square_names) clsq_sweep_numbers = clsq_sweeps['sweep_number'].sort_values().values if len(clsq_sweep_numbers) > 0: subthresh_min_amp, clsq_amp_delta = select_subthreshold_min_amplitude(clsq_sweeps['stimulus_amplitude']) @@ -284,7 +284,7 @@ def extract_cell_short_square_features(data_set): lu.log_pretty_header("Short Squares:", level=2) short_square_sweep_numbers = data_set.get_sweep_numbers( - data_set.ontology.SHORT_SQUARE_NAMES, + data_set.ontology.short_square_names, clamp_mode=data_set.CURRENT_CLAMP) if len(short_square_sweep_numbers) == 0: raise er.FeatureError("No short square sweeps available for feature extraction") @@ -317,7 +317,7 @@ def extract_cell_ramp_features(data_set): lu.log_pretty_header("Ramps:", level=2) ramp_sweep_numbers = data_set.get_sweep_numbers( - data_set.ontology.RAMP_NAMES, + 
data_set.ontology.ramp_names, clamp_mode=data_set.CURRENT_CLAMP) if len(ramp_sweep_numbers) == 0: raise er.FeatureError("No ramp sweeps available for feature extraction") diff --git a/ipfx/defaults/stimulus_ontology.json b/ipfx/defaults/stimulus_ontology.json index f55da7da..7f5145ae 100644 --- a/ipfx/defaults/stimulus_ontology.json +++ b/ipfx/defaults/stimulus_ontology.json @@ -2043,5 +2043,20 @@ "core", "Core 1" ] + ], + [ + [ + "code", + "C2CHIRPA", + "C2CHIRPA180503" + ], + [ + "core", + "Core 2" + ], + [ + "name", + "Chirp A Threshold" + ] ] ] \ No newline at end of file diff --git a/ipfx/plot_qc_figures.py b/ipfx/plot_qc_figures.py index 83baa691..da361bd1 100644 --- a/ipfx/plot_qc_figures.py +++ b/ipfx/plot_qc_figures.py @@ -448,7 +448,7 @@ def plot_instantaneous_threshold_thumbnail(data_set, sweep_numbers, cell_feature def plot_ramp_figures(data_set, cell_features, lims_features, sweep_features, image_dir, sizes, cell_image_files): ramps_sweeps = data_set.filtered_sweep_table(clamp_mode=data_set.CURRENT_CLAMP, - stimuli=data_set.ontology.RAMP_NAMES) + stimuli=data_set.ontology.ramp_names) ramps_sweeps = np.sort(ramps_sweeps['sweep_number'].values) figs = [] diff --git a/ipfx/qc_feature_evaluator.py b/ipfx/qc_feature_evaluator.py index bb3ac830..3d164634 100644 --- a/ipfx/qc_feature_evaluator.py +++ b/ipfx/qc_feature_evaluator.py @@ -66,7 +66,7 @@ def qc_sweeps(ontology, sweep_features, qc_criteria): for sweep in sweep_features: sweep_num = sweep["sweep_number"] - is_ramp = ontology.stimulus_has_any_tags(sweep["stimulus_code"], ontology.RAMP_NAMES) + is_ramp = ontology.stimulus_has_any_tags(sweep["stimulus_code"], ontology.ramp_names) fail_tags = qc_current_clamp_sweep(sweep, is_ramp, qc_criteria) sweep_state = sp.create_sweep_state(sweep_num, fail_tags) sweep_states.append(sweep_state) diff --git a/ipfx/qc_feature_extractor.py b/ipfx/qc_feature_extractor.py index 8579881f..05868a34 100644 --- a/ipfx/qc_feature_extractor.py +++ b/ipfx/qc_feature_extractor.py 
@@ -25,7 +25,7 @@ def extract_blowout(data_set, tags): ontology = data_set.ontology try: - blowout_sweep_number = data_set.get_sweep_numbers(ontology.BLOWOUT_NAMES)[-1] + blowout_sweep_number = data_set.get_sweep_numbers(ontology.blowout_names)[-1] blowout_data = data_set.sweep(blowout_sweep_number) _,test_end_idx = blowout_data.epochs["test"] blowout_mv = qcf.measure_blowout(blowout_data.v, test_end_idx) @@ -54,7 +54,7 @@ def extract_electrode_0(data_set, tags): ontology = data_set.ontology try: - bath_sweep_number = data_set.get_sweep_numbers(ontology.BATH_NAMES)[-1] + bath_sweep_number = data_set.get_sweep_numbers(ontology.bath_names)[-1] bath_data = data_set.sweep(bath_sweep_number) e0 = qcf.measure_electrode_0(bath_data.i, bath_data.sampling_rate) @@ -96,7 +96,7 @@ def extract_clamp_seal(data_set, tags, manual_values=None): ontology = data_set.ontology try: - seal_sweep_number = data_set.get_sweep_numbers(ontology.SEAL_NAMES,"VoltageClamp")[-1] + seal_sweep_number = data_set.get_sweep_numbers(ontology.seal_names,"VoltageClamp")[-1] seal_data = data_set.sweep(seal_sweep_number) seal_gohm = qcf.measure_seal(seal_data.v, @@ -148,7 +148,7 @@ def extract_input_and_access_resistance(data_set, tags, manual_values=None): try: - breakin_sweep_number = data_set.get_sweep_numbers(ontology.BREAKIN_NAMES, "VoltageClamp")[-1] + breakin_sweep_number = data_set.get_sweep_numbers(ontology.breakin_names, "VoltageClamp")[-1] breakin_data = data_set.sweep(breakin_sweep_number) except IndexError as e: tags.append("Breakin sweep not found") @@ -276,7 +276,7 @@ def sweep_qc_features(data_set): sweep_num = sweep_info['sweep_number'] sweep = data_set.sweep(sweep_num) - is_ramp = sweep_info['stimulus_name'] in ontology.RAMP_NAMES + is_ramp = sweep_info['stimulus_name'] in ontology.ramp_names tags = check_sweep_integrity(sweep, is_ramp) sweep_features["tags"] = tags diff --git a/ipfx/stimulus.py b/ipfx/stimulus.py index 2577c8b8..64c2f6ae 100644 --- a/ipfx/stimulus.py +++ 
b/ipfx/stimulus.py @@ -6,49 +6,61 @@ from enum import Enum -_STIMULUS_TYPE_NAME_MAPPING = { + +class StimulusType(Enum): + RAMP = "ramp" + LONG_SQUARE = "long_square" + COARSE_LONG_SQUARE = "coarse_long_square" + SHORT_SQUARE_TRIPLE = "short_square_triple" + SHORT_SQUARE = "short_square" + CHIRP = "chirp" + SEARCH = "search" + TEST = "test" + BLOWOUT = "blowout" + BATH = "bath" + SEAL = "seal" + BREAKIN = "breakin" + EXTP = "extp" + + +STIMULUS_TYPE_NAME_MAPPING = { # Maps stimulus type to set of names - "ramp": {"Ramp"}, - "long_square": { + StimulusType.RAMP: {"Ramp"}, + StimulusType.LONG_SQUARE: { "Long Square", "Long Square Threshold", "Long Square SupraThreshold", "Long Square SubThreshold", }, - "coarse_long_square": { + StimulusType.COARSE_LONG_SQUARE: { "C1LSCOARSE", }, - "short_square_triple": { + StimulusType.SHORT_SQUARE_TRIPLE: { "Short Square - Triple", }, - "short_square": { + StimulusType.SHORT_SQUARE: { "Short Square", "Short Square Threshold", "Short Square - Hold -60mV", "Short Square - Hold -70mV", "Short Square - Hold -80mV", }, - "chirp": { + StimulusType.CHIRP: { "Chirp", "Chirp A Threshold", "Chirp B - Hold -65mV", "Chirp C - Hold -60mV", "Chirp D - Hold -55mV", }, - "search": {"Search"}, - "test": {"Test"}, - "blowout": {"EXTPBLWOUT"}, - "bath": {"EXTPINBATH"}, - "seal": {"EXTPCllATT"}, - "breakin": {"EXTPBREAKN"}, - "extp": {"EXTP"} + StimulusType.SEARCH: {"Search"}, + StimulusType.TEST: {"Test"}, + StimulusType.BLOWOUT: {"EXTPBLWOUT"}, + StimulusType.BATH: {"EXTPINBATH"}, + StimulusType.SEAL: {"EXTPCllATT"}, + StimulusType.BREAKIN: {"EXTPBREAKN"}, + StimulusType.EXTP: {"EXTP"} } -StimulusType = Enum("StimulusType", [(k.upper(), k) for k in _STIMULUS_TYPE_NAME_MAPPING.keys()]) -# e.g. ipfx.stimulus.StimulusType.detection_parameters(StimulusType. 
=> - -STIMULUS_TYPE_NAME_MAPPING = {StimulusType(stim_type): stim_names for stim_type, stim_names in _STIMULUS_TYPE_NAME_MAPPING.items()} - def get_stimulus_type(stimulus_name): for stim_type, stim_names in STIMULUS_TYPE_NAME_MAPPING.items(): @@ -97,9 +109,22 @@ def __init__(self, stim_ontology_tags=None): self.stimuli = list(Stimulus(s) for s in stim_ontology_tags) - for stimulus_type, names in _STIMULUS_TYPE_NAME_MAPPING.items(): - # Sets helper attributes like StimulusOntology.RAMP_NAMES - setattr(self, f"{stimulus_type.upper()}_NAMES", names) + # Must match Stimulus Type Name Mapping, e.g + # for stimulus_type, names in _STIMULUS_TYPE_NAME_MAPPING.items(): + # setattr(self, f"{stimulus_type.upper()}_NAMES", names) + self.ramp_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.RAMP] + self.long_square_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.LONG_SQUARE] + self.coarse_long_square_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.COARSE_LONG_SQUARE] + self.short_square_triple_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.SHORT_SQUARE_TRIPLE] + self.short_square_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.SHORT_SQUARE] + self.chirp_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.CHIRP] + self.search_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.SEARCH] + self.test_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.TEST] + self.blowout_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.BLOWOUT] + self.bath_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.BATH] + self.seal_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.SEAL] + self.breakin_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.BREAKIN] + self.extp_names = STIMULUS_TYPE_NAME_MAPPING[StimulusType.EXTP] def find(self, tag, tag_type=None): From 74c64b01afef5b5297a5bfca2ca6bc0412fd4dd3 Mon Sep 17 00:00:00 2001 From: Matt Aitken Date: Mon, 5 Dec 2022 20:41:51 -0700 Subject: [PATCH 4/5] Update version.txt --- ipfx/version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ipfx/version.txt 
b/ipfx/version.txt index af0b7ddb..238d6e88 100644 --- a/ipfx/version.txt +++ b/ipfx/version.txt @@ -1 +1 @@ -1.0.6 +1.0.7 From 011bca89028a2cc83a643be6cc819d317669111d Mon Sep 17 00:00:00 2001 From: Matt Aitken Date: Mon, 5 Dec 2022 20:48:11 -0700 Subject: [PATCH 5/5] Update CHANGELOG.md --- CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 42502abe..c93713b3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,15 @@ All notable changes to this project will be documented in this file. ### Changed +## [1.0.7] = 2022-12-05 +Changed: +- Added StimulusType and STIMULUS_TYPE_NAME_MAPPING to stimulus ontology, replacing definitions in EphysDataSet +- Updated data_set_features to use correct sweep feature extractor detection parameters based on StimulusType + +## [1.0.6] = 2022-06-29 +Changed: +- Stop IPFX from caching its NWB Schemas when writing/modifying NWB files + ## [1.0.5] = 2021-12-13 Bug fixes: - Converts nwb_version attribute to string if it is in utf-8 encoded bytes.