From e867b94b222e99d7ab1da8e382d87b3263a5c311 Mon Sep 17 00:00:00 2001 From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com> Date: Fri, 10 Nov 2023 17:51:45 +0100 Subject: [PATCH 1/4] Feature/remove pandas bridge (#530) * remove module pandas_bridge.py * remove unittests test_pandas_bridge.py * remove pandas from rquirements-extras * remove pandas from documentation, modules.rst * remove pandas from environment.yml * remove pandas from documentation pandas_bridge.rst * remove pandas from environment-tests.yml * remove pandas from environment-docs.yml --- doc/modules.rst | 1 - doc/reference/pandas_bridge.rst | 5 - elephant/pandas_bridge.py | 629 ------ elephant/test/test_pandas_bridge.py | 3092 -------------------------- requirements/environment-docs.yml | 1 - requirements/environment-tests.yml | 1 - requirements/environment.yml | 1 - requirements/requirements-extras.txt | 1 - 8 files changed, 3731 deletions(-) delete mode 100644 doc/reference/pandas_bridge.rst delete mode 100644 elephant/pandas_bridge.py delete mode 100644 elephant/test/test_pandas_bridge.py diff --git a/doc/modules.rst b/doc/modules.rst index 96cc8bc18..d076db1e2 100644 --- a/doc/modules.rst +++ b/doc/modules.rst @@ -96,5 +96,4 @@ Miscellaneous reference/neo_tools reference/utils - reference/pandas_bridge reference/parallel diff --git a/doc/reference/pandas_bridge.rst b/doc/reference/pandas_bridge.rst deleted file mode 100644 index 2fb5966e1..000000000 --- a/doc/reference/pandas_bridge.rst +++ /dev/null @@ -1,5 +0,0 @@ -============================ -Bridge to the pandas library -============================ - -.. automodule:: elephant.pandas_bridge diff --git a/elephant/pandas_bridge.py b/elephant/pandas_bridge.py deleted file mode 100644 index 1163e0368..000000000 --- a/elephant/pandas_bridge.py +++ /dev/null @@ -1,629 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Bridge to the pandas library. - -.. 
autosummary:: - :toctree: _toctree/pandas_bridge - - spiketrain_to_dataframe - event_to_dataframe - epoch_to_dataframe - multi_spiketrains_to_dataframe - multi_events_to_dataframe - multi_epochs_to_dataframe - slice_spiketrain - -:copyright: Copyright 2014-2023 by the Elephant team, see `doc/authors.rst`. -:license: Modified BSD, see LICENSE.txt for details. -""" - -from __future__ import division, print_function, unicode_literals - -import numpy as np -import pandas as pd -import warnings -import quantities as pq - -from elephant.neo_tools import (extract_neo_attributes, get_all_epochs, - get_all_events, get_all_spiketrains) - - -warnings.simplefilter('once', DeprecationWarning) -warnings.warn("pandas_bridge module will be removed in Elephant v0.8.x", - DeprecationWarning) - - -def _multiindex_from_dict(inds): - """Given a dictionary, return a `pandas.MultiIndex`. - - Parameters - ---------- - inds : dict - A dictionary where the keys are annotations or attribute names and - the values are the corresponding annotation or attribute value. - - Returns - ------- - pandas MultiIndex - """ - names, indexes = zip(*sorted(inds.items())) - return pd.MultiIndex.from_tuples([indexes], names=names) - - -def _sort_inds(obj, axis=0): - """Put the indexes and index levels of a pandas object in sorted order. - - Paramters - --------- - obj : pandas Series, DataFrame, Panel, or Panel4D - The object whose indexes should be sorted. - axis : int, list, optional, 'all' - The axis whose indexes should be sorted. Default is 0. - Can also be a list of indexes, in which case all of those axes - are sorted. If 'all', sort all indexes. - - Returns - ------- - pandas Series, DataFrame, Panel, or Panel4D - A copy of the object with indexes sorted. - Indexes are sorted in-place. 
- """ - if axis == 'all': - return _sort_inds(obj, axis=range(obj.ndim)) - - if hasattr(axis, '__iter__'): - for iax in axis: - obj = _sort_inds(obj, iax) - return obj - - obj = obj.reorder_levels(sorted(obj.axes[axis].names), axis=axis) - return obj.sort_index(level=0, axis=axis, sort_remaining=True) - - -def _extract_neo_attrs_safe(obj, parents=True, child_first=True): - """Given a neo object, return a dictionary of attributes and annotations. - - This is done in a manner that is safe for `pandas` indexes. - - Parameters - ---------- - - obj : neo object - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). - child_first : bool, optional - If True (default True), values of child attributes are used - over parent attributes in the event of a name conflict. - If False, parent attributes are used. - This parameter does nothing if `parents` is False. - - Returns - ------- - - dict - A dictionary where the keys are annotations or attribute names and - the values are the corresponding annotation or attribute value. - - """ - res = extract_neo_attributes(obj, skip_array=True, skip_none=True, - parents=parents, child_first=child_first) - for key, value in res.items(): - res[key] = _convert_value_safe(value) - key2 = _convert_value_safe(key) - if key2 is not key: - res[key2] = res.pop(key) - - return res - - -def _convert_value_safe(value): - """Convert `neo` values to a value compatible with `pandas`. - - Some types and dtypes used with neo are not safe to use with pandas in some - or all situations. - - `quantities.Quantity` don't follow the normal python rule that values - with that are equal should have the same hash, making it fundamentally - incompatible with `pandas`. - - On python 3, `pandas` coerces `S` dtypes to bytes, which are not always - safe to use. - - Parameters - ---------- - - value : any - Value to convert (if it has any known issues). 
- - Returns - ------- - - any - `value` or a version of value with potential problems fixed. - - """ - if hasattr(value, 'dimensionality'): - return (value.magnitude.tolist(), str(value.dimensionality)) - if hasattr(value, 'dtype') and value.dtype.kind == 'S': - return value.astype('U').tolist() - if hasattr(value, 'tolist'): - return value.tolist() - if hasattr(value, 'decode') and not hasattr(value, 'encode'): - return value.decode('UTF8') - return value - - -def spiketrain_to_dataframe(spiketrain, parents=True, child_first=True): - """Convert a `neo.SpikeTrain` to a `pandas.DataFrame`. - - The `pandas.DataFrame` object has a single column, with each element - being the spike time converted to a `float` value in seconds. - - The column heading is a `pandas.MultiIndex` with one index - for each of the scalar attributes and annotations. The `index` - is the spike number. - - Parameters - ---------- - - spiketrain : neo SpikeTrain - The SpikeTrain to convert. - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). - - Returns - ------- - - pandas DataFrame - A DataFrame containing the spike times from `spiketrain`. - - Notes - ----- - - The index name is `spike_number`. - - Attributes that contain non-scalar values are skipped. So are - annotations or attributes containing a value of `None`. - - `quantity.Quantities` types are incompatible with `pandas`, so attributes - and annotations of that type are converted to a tuple where the first - element is the scalar value and the second is the string representation of - the units. 
- - """ - attrs = _extract_neo_attrs_safe(spiketrain, - parents=parents, child_first=child_first) - columns = _multiindex_from_dict(attrs) - - times = spiketrain.magnitude - times = pq.Quantity(times, spiketrain.units).rescale('s').magnitude - times = times[np.newaxis].T - - index = pd.Index(np.arange(len(spiketrain)), name='spike_number') - - pdobj = pd.DataFrame(times, index=index, columns=columns) - return _sort_inds(pdobj, axis=1) - - -def event_to_dataframe(event, parents=True, child_first=True): - """Convert a `neo.core.Event` to a `pandas.DataFrame`. - - The `pandas.DataFrame` object has a single column, with each element - being the event label from the `event.label` attribute. - - The column heading is a `pandas.MultiIndex` with one index - for each of the scalar attributes and annotations. The `index` - is the time stamp from the `event.times` attribute. - - Parameters - ---------- - - event : neo Event - The Event to convert. - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). - child_first : bool, optional - If True (default True), values of child attributes are used - over parent attributes in the event of a name conflict. - If False, parent attributes are used. - This parameter does nothing if `parents` is False. - - Returns - ------- - - pandas DataFrame - A DataFrame containing the labels from `event`. - - Notes - ----- - - If the length of event.times and event.labels are not the same, - the longer will be truncated to the length of the shorter. - - The index name is `times`. - - Attributes that contain non-scalar values are skipped. So are - annotations or attributes containing a value of `None`. - - `quantity.Quantities` types are incompatible with `pandas`, so attributes - and annotations of that type are converted to a tuple where the first - element is the scalar value and the second is the string representation of - the units. 
- - """ - attrs = _extract_neo_attrs_safe(event, - parents=parents, child_first=child_first) - columns = _multiindex_from_dict(attrs) - - times = event.times.rescale('s').magnitude - labels = event.labels.astype('U') - - times = times[:len(labels)] - labels = labels[:len(times)] - - index = pd.Index(times, name='times') - - pdobj = pd.DataFrame(labels[np.newaxis].T, index=index, columns=columns) - return _sort_inds(pdobj, axis=1) - - -def epoch_to_dataframe(epoch, parents=True, child_first=True): - """Convert a `neo.core.Epoch` to a `pandas.DataFrame`. - - The `pandas.DataFrame` object has a single column, with each element - being the epoch label from the `epoch.label` attribute. - - The column heading is a `pandas.MultiIndex` with one index - for each of the scalar attributes and annotations. The `index` - is a `pandas.MultiIndex`, with the first index being the time stamp from - the `epoch.times` attribute and the second being the duration from - the `epoch.durations` attribute. - - Parameters - ---------- - - epoch : neo Epoch - The Epoch to convert. - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). - child_first : bool, optional - If True (default True), values of child attributes are used - over parent attributes in the event of a name conflict. - If False, parent attributes are used. - This parameter does nothing if `parents` is False. - - Returns - ------- - - pandas DataFrame - A DataFrame containing the labels from `epoch`. - - Notes - ----- - - If the length of `epoch.times`, `epoch.duration`, and `epoch.labels` are - not the same, the longer will be truncated to the length of the shortest. - - The index names for `epoch.times` and `epoch.durations` are `times` and - `durations`, respectively. - - Attributes that contain non-scalar values are skipped. So are - annotations or attributes containing a value of `None`. 
- - `quantity.Quantities` types are incompatible with `pandas`, so attributes - and annotations of that type are converted to a tuple where the first - element is the scalar value and the second is the string representation of - the units. - - """ - attrs = _extract_neo_attrs_safe(epoch, - parents=parents, child_first=child_first) - columns = _multiindex_from_dict(attrs) - - times = epoch.times.rescale('s').magnitude - durs = epoch.durations.rescale('s').magnitude - labels = epoch.labels.astype('U') - - minlen = min([len(durs), len(times), len(labels)]) - index = pd.MultiIndex.from_arrays([times[:minlen], durs[:minlen]], - names=['times', 'durations']) - - pdobj = pd.DataFrame(labels[:minlen][np.newaxis].T, - index=index, columns=columns) - return _sort_inds(pdobj, axis='all') - - -def _multi_objs_to_dataframe(container, conv_func, get_func, - parents=True, child_first=True): - """Convert one or more of a given `neo` object to a `pandas.DataFrame`. - - The objects can be any list, dict, or other iterable or mapping containing - the object, as well as any neo object that can hold the object. - Objects are searched recursively, so the objects can be nested (such as a - list of blocks). - - The column heading is a `pandas.MultiIndex` with one index - for each of the scalar attributes and annotations of the respective - object. - - Parameters - ---------- - - container : list, tuple, iterable, dict, neo container object - The container for the objects to convert. - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). - child_first : bool, optional - If True (default True), values of child attributes are used - over parent attributes in the event of a name conflict. - If False, parent attributes are used. - This parameter does nothing if `parents` is False. - - Returns - ------- - - pandas DataFrame - A DataFrame containing the converted objects. - - Attributes that contain non-scalar values are skipped. 
So are - annotations or attributes containing a value of `None`. - - `quantity.Quantities` types are incompatible with `pandas`, so attributes - and annotations of that type are converted to a tuple where the first - element is the scalar value and the second is the string representation of - the units. - - """ - res = pd.concat([conv_func(obj, parents=parents, child_first=child_first) - for obj in get_func(container)], axis=1) - return _sort_inds(res, axis=1) - - -def multi_spiketrains_to_dataframe(container, - parents=True, child_first=True): - """Convert one or more `neo.SpikeTrain` objects to a `pandas.DataFrame`. - - The objects can be any list, dict, or other iterable or mapping containing - spiketrains, as well as any neo object that can hold spiketrains: - `neo.Block`, `neo.ChannelIndex`, `neo.Unit`, and `neo.Segment`. - Objects are searched recursively, so the objects can be nested (such as a - list of blocks). - - The `pandas.DataFrame` object has one column for each spiketrain, with each - element being the spike time converted to a `float` value in seconds. - columns are padded to the same length with `NaN` values. - - The column heading is a `pandas.MultiIndex` with one index - for each of the scalar attributes and annotations of the respective - spiketrain. The `index` is the spike number. - - Parameters - ---------- - - container : list, tuple, iterable, dict, - neo Block, neo Segment, neo Unit, neo ChannelIndex - The container for the spiketrains to convert. - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). - child_first : bool, optional - If True (default True), values of child attributes are used - over parent attributes in the event of a name conflict. - If False, parent attributes are used. - This parameter does nothing if `parents` is False. - - Returns - ------- - - pandas DataFrame - A DataFrame containing the spike times from `container`. 
- - Notes - ----- - - The index name is `spike_number`. - - Attributes that contain non-scalar values are skipped. So are - annotations or attributes containing a value of `None`. - - `quantity.Quantities` types are incompatible with `pandas`, so attributes - and annotations of that type are converted to a tuple where the first - element is the scalar value and the second is the string representation of - the units. - - """ - return _multi_objs_to_dataframe(container, - spiketrain_to_dataframe, - get_all_spiketrains, - parents=parents, child_first=child_first) - - -def multi_events_to_dataframe(container, parents=True, child_first=True): - """Convert one or more `neo.Event` objects to a `pandas.DataFrame`. - - The objects can be any list, dict, or other iterable or mapping containing - events, as well as any neo object that can hold events: - `neo.Block` and `neo.Segment`. Objects are searched recursively, so the - objects can be nested (such as a list of blocks). - - The `pandas.DataFrame` object has one column for each event, with each - element being the event label. columns are padded to the same length with - `NaN` values. - - The column heading is a `pandas.MultiIndex` with one index - for each of the scalar attributes and annotations of the respective - event. The `index` is the time stamp from the `event.times` attribute. - - Parameters - ---------- - - container : list, tuple, iterable, dict, neo Block, neo Segment - The container for the events to convert. - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). - child_first : bool, optional - If True (default True), values of child attributes are used - over parent attributes in the event of a name conflict. - If False, parent attributes are used. - This parameter does nothing if `parents` is False. - - Returns - ------- - - pandas DataFrame - A DataFrame containing the labels from `container`. 
- - Notes - ----- - - If the length of event.times and event.labels are not the same for any - individual event, the longer will be truncated to the length of the - shorter for that event. Between events, lengths can differ. - - The index name is `times`. - - Attributes that contain non-scalar values are skipped. So are - annotations or attributes containing a value of `None`. - - `quantity.Quantities` types are incompatible with `pandas`, so attributes - and annotations of that type are converted to a tuple where the first - element is the scalar value and the second is the string representation of - the units. - - """ - return _multi_objs_to_dataframe(container, - event_to_dataframe, get_all_events, - parents=parents, child_first=child_first) - - -def multi_epochs_to_dataframe(container, parents=True, child_first=True): - """Convert one or more `neo.Epoch` objects to a `pandas.DataFrame`. - - The objects can be any list, dict, or other iterable or mapping containing - epochs, as well as any neo object that can hold epochs: - `neo.Block` and `neo.Segment`. Objects are searched recursively, so the - objects can be nested (such as a list of blocks). - - The `pandas.DataFrame` object has one column for each epoch, with each - element being the epoch label. columns are padded to the same length with - `NaN` values. - - The column heading is a `pandas.MultiIndex` with one index - for each of the scalar attributes and annotations of the respective - epoch. The `index` is a `pandas.MultiIndex`, with the first index being - the time stamp from the `epoch.times` attribute and the second being the - duration from the `epoch.durations` attribute. - - Parameters - ---------- - - container : list, tuple, iterable, dict, neo Block, neo Segment - The container for the epochs to convert. - parents : bool, optional - Also include attributes and annotations from parent neo - objects (if any). 
- child_first : bool, optional - If True (default True), values of child attributes are used - over parent attributes in the event of a name conflict. - If False, parent attributes are used. - This parameter does nothing if `parents` is False. - - Returns - ------- - - pandas DataFrame - A DataFrame containing the labels from `container`. - - Notes - ----- - - If the length of `epoch.times`, `epoch.duration`, and `epoch.labels` are - not the same for any individual epoch, the longer will be truncated to the - length of the shorter for that epoch. Between epochs, lengths can differ. - - The index level names for `epoch.times` and `epoch.durations` are - `times` and `durations`, respectively. - - Attributes that contain non-scalar values are skipped. So are - annotations or attributes containing a value of `None`. - - `quantity.Quantities` types are incompatible with `pandas`, so attributes - and annotations of that type are converted to a tuple where the first - element is the scalar value and the second is the string representation of - the units. - - """ - return _multi_objs_to_dataframe(container, - epoch_to_dataframe, get_all_epochs, - parents=parents, child_first=child_first) - - -def slice_spiketrain(pdobj, t_start=None, t_stop=None): - """Slice a `pandas.DataFrame`, changing indices appropriately. - - Values outside the sliced range are converted to `NaN` values. - - Slicing happens over columns. - - This sets the `t_start` and `t_stop` column indexes to be the new values. - Otherwise it is the same as setting values outside the range to `NaN`. - - Parameters - ---------- - pdobj : pandas DataFrame - The DataFrame to slice. - t_start : float, optional. - If specified, the returned DataFrame values less than this set - to `NaN`. - Default is `None` (do not use this argument). - t_stop : float, optional. - If specified, the returned DataFrame values greater than this set - to `NaN`. - Default is `None` (do not use this argument). 
- - Returns - ------- - - pdobj : scalar, pandas Series, DataFrame, or Panel - The returned data type is the same as the type of `pdobj` - - Notes - ----- - - The order of the index and/or column levels of the returned object may - differ from the order of the original. - - If `t_start` or `t_stop` is specified, all columns indexes will be changed - to the respective values, including those already within the new range. - If `t_start` or `t_stop` is not specified, those column indexes will not - be changed. - - Returns a copy, even if `t_start` and `t_stop` are both `None`. - - """ - if t_start is None and t_stop is None: - return pdobj.copy() - - if t_stop is not None: - pdobj[pdobj > t_stop] = np.nan - - pdobj = pdobj.T.reset_index(level='t_stop') - pdobj['t_stop'] = t_stop - pdobj = pdobj.set_index('t_stop', append=True).T - pdobj = _sort_inds(pdobj, axis=1) - - if t_start is not None: - pdobj[pdobj < t_start] = np.nan - - pdobj = pdobj.T.reset_index(level='t_start') - pdobj['t_start'] = t_start - pdobj = pdobj.set_index('t_start', append=True).T - pdobj = _sort_inds(pdobj, axis=1) - - return pdobj diff --git a/elephant/test/test_pandas_bridge.py b/elephant/test/test_pandas_bridge.py deleted file mode 100644 index 6394d83da..000000000 --- a/elephant/test/test_pandas_bridge.py +++ /dev/null @@ -1,3092 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Unit tests for the pandas bridge module. - -:copyright: Copyright 2014-2023 by the Elephant team, see `doc/authors.rst`. -:license: Modified BSD, see LICENSE.txt for details. 
-""" - -import unittest -import warnings -from distutils.version import StrictVersion -from itertools import chain - -import numpy as np -import quantities as pq -import neo.core -from neo.test.generate_datasets import generate_one_simple_block, \ - generate_one_simple_segment, \ - random_event, random_epoch, random_spiketrain -from numpy.testing import assert_array_equal - -try: - import pandas as pd - from pandas.util.testing import assert_frame_equal, assert_index_equal -except ImportError: - HAVE_PANDAS = False - pandas_version = StrictVersion('0.0.0') -else: - import elephant.pandas_bridge as ep - - HAVE_PANDAS = True - pandas_version = StrictVersion(pd.__version__) - -if HAVE_PANDAS: - # Currying, otherwise the unittest will break with pandas>=0.16.0 - # parameter check_names is introduced in a newer versions than 0.14.0 - # this test is written for pandas 0.14.0 - def assert_index_equal(left, right): - try: - # pandas>=0.16.0 - return pd.util.testing.assert_index_equal(left, right, - check_names=False) - except TypeError: - # pandas older version - return pd.util.testing.assert_index_equal(left, right) - - -@unittest.skipUnless(pandas_version >= '0.24.0', 'requires pandas v0.24.0') -class MultiindexFromDictTestCase(unittest.TestCase): - def test__multiindex_from_dict(self): - inds = {'test1': 6.5, - 'test2': 5, - 'test3': 'test'} - targ = pd.MultiIndex(levels=[[6.5], [5], ['test']], - codes=[[0], [0], [0]], - names=['test1', 'test2', 'test3']) - res0 = ep._multiindex_from_dict(inds) - self.assertEqual(targ.levels, res0.levels) - self.assertEqual(targ.names, res0.names) - self.assertEqual(targ.codes, res0.codes) - - -def _convert_levels(levels): - """Convert a list of levels to the format pandas returns for a MultiIndex. - - Parameters - ---------- - - levels : list - The list of levels to convert. - - Returns - ------- - - list - The the level in `list` converted to values like what pandas will give. 
- - """ - levels = list(levels) - for i, level in enumerate(levels): - if hasattr(level, 'lower'): - try: - level = unicode(level) - except NameError: - pass - elif hasattr(level, 'date'): - levels[i] = pd.DatetimeIndex(data=[level]) - continue - elif level is None: - levels[i] = pd.Index([]) - continue - - # pd.Index around pd.Index to convert to Index structure if MultiIndex - levels[i] = pd.Index(pd.Index([level])) - return levels - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class ConvertValueSafeTestCase(unittest.TestCase): - def test__convert_value_safe__float(self): - targ = 5.5 - value = targ - - res = ep._convert_value_safe(value) - - self.assertIs(res, targ) - - def test__convert_value_safe__str(self): - targ = 'test' - value = targ - - res = ep._convert_value_safe(value) - - self.assertIs(res, targ) - - def test__convert_value_safe__bytes(self): - targ = 'test' - value = b'test' - - res = ep._convert_value_safe(value) - - self.assertEqual(res, targ) - - def test__convert_value_safe__numpy_int_scalar(self): - targ = 5 - value = np.array(5) - - res = ep._convert_value_safe(value) - - self.assertEqual(res, targ) - self.assertFalse(hasattr(res, 'dtype')) - - def test__convert_value_safe__numpy_float_scalar(self): - targ = 5. - value = np.array(5.) - - res = ep._convert_value_safe(value) - - self.assertEqual(res, targ) - self.assertFalse(hasattr(res, 'dtype')) - - def test__convert_value_safe__numpy_unicode_scalar(self): - targ = u'test' - value = np.array('test', dtype='U') - - res = ep._convert_value_safe(value) - - self.assertEqual(res, targ) - self.assertFalse(hasattr(res, 'dtype')) - - def test__convert_value_safe__numpy_str_scalar(self): - targ = u'test' - value = np.array('test', dtype='S') - - res = ep._convert_value_safe(value) - - self.assertEqual(res, targ) - self.assertFalse(hasattr(res, 'dtype')) - - def test__convert_value_safe__quantity_scalar(self): - targ = (10., 'ms') - value = 10. 
* pq.ms - - res = ep._convert_value_safe(value) - - self.assertEqual(res, targ) - self.assertFalse(hasattr(res[0], 'dtype')) - self.assertFalse(hasattr(res[0], 'units')) - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class SpiketrainToDataframeTestCase(unittest.TestCase): - def test__spiketrain_to_dataframe__parents_empty(self): - obj = random_spiketrain() - - res0 = ep.spiketrain_to_dataframe(obj) - res1 = ep.spiketrain_to_dataframe(obj, child_first=True) - res2 = ep.spiketrain_to_dataframe(obj, child_first=False) - res3 = ep.spiketrain_to_dataframe(obj, parents=True) - res4 = ep.spiketrain_to_dataframe(obj, parents=True, - child_first=True) - res5 = ep.spiketrain_to_dataframe(obj, parents=True, - child_first=False) - res6 = ep.spiketrain_to_dataframe(obj, parents=False) - res7 = ep.spiketrain_to_dataframe(obj, parents=False, child_first=True) - res8 = ep.spiketrain_to_dataframe(obj, parents=False, - child_first=False) - - targvalues = pq.Quantity(obj.magnitude, units=obj.units) - targvalues = targvalues.rescale('s').magnitude[np.newaxis].T - targindex = np.arange(len(targvalues)) - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - self.assertEqual(1, len(res3.columns)) - self.assertEqual(1, len(res4.columns)) - self.assertEqual(1, len(res5.columns)) - self.assertEqual(1, len(res6.columns)) - self.assertEqual(1, len(res7.columns)) - self.assertEqual(1, len(res8.columns)) - - self.assertEqual(len(obj), len(res0.index)) - self.assertEqual(len(obj), len(res1.index)) - self.assertEqual(len(obj), len(res2.index)) - self.assertEqual(len(obj), len(res3.index)) - self.assertEqual(len(obj), len(res4.index)) - self.assertEqual(len(obj), len(res5.index)) - self.assertEqual(len(obj), len(res6.index)) - self.assertEqual(len(obj), 
len(res7.index)) - self.assertEqual(len(obj), len(res8.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - assert_array_equal(targvalues, res3.values) - assert_array_equal(targvalues, res4.values) - assert_array_equal(targvalues, res5.values) - assert_array_equal(targvalues, res6.values) - assert_array_equal(targvalues, res7.values) - assert_array_equal(targvalues, res8.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, res1.index) - assert_array_equal(targindex, res2.index) - assert_array_equal(targindex, res3.index) - assert_array_equal(targindex, res4.index) - assert_array_equal(targindex, res5.index) - assert_array_equal(targindex, res6.index) - assert_array_equal(targindex, res7.index) - assert_array_equal(targindex, res8.index) - - self.assertEqual(['spike_number'], res0.index.names) - self.assertEqual(['spike_number'], res1.index.names) - self.assertEqual(['spike_number'], res2.index.names) - self.assertEqual(['spike_number'], res3.index.names) - self.assertEqual(['spike_number'], res4.index.names) - self.assertEqual(['spike_number'], res5.index.names) - self.assertEqual(['spike_number'], res6.index.names) - self.assertEqual(['spike_number'], res7.index.names) - self.assertEqual(['spike_number'], res8.index.names) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - self.assertEqual(keys, res3.columns.names) - self.assertEqual(keys, res4.columns.names) - self.assertEqual(keys, res5.columns.names) - self.assertEqual(keys, res6.columns.names) - self.assertEqual(keys, res7.columns.names) - self.assertEqual(keys, res8.columns.names) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, 
res2.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res3.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res4.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res5.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res6.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res7.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res8.columns.levels): - assert_index_equal(value, level) - - def test__spiketrain_to_dataframe__noparents(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - obj = blk.list_children_by_class('SpikeTrain')[0] - - res0 = ep.spiketrain_to_dataframe(obj, parents=False) - res1 = ep.spiketrain_to_dataframe(obj, parents=False, - child_first=True) - res2 = ep.spiketrain_to_dataframe(obj, parents=False, - child_first=False) - - targvalues = pq.Quantity(obj.magnitude, units=obj.units) - targvalues = targvalues.rescale('s').magnitude[np.newaxis].T - targindex = np.arange(len(targvalues)) - - attrs = ep._extract_neo_attrs_safe(obj, parents=False, - child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - - self.assertEqual(len(obj), len(res0.index)) - self.assertEqual(len(obj), len(res1.index)) - self.assertEqual(len(obj), len(res2.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, 
res1.index) - assert_array_equal(targindex, res2.index) - - self.assertEqual(['spike_number'], res0.index.names) - self.assertEqual(['spike_number'], res1.index.names) - self.assertEqual(['spike_number'], res2.index.names) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - - def test__spiketrain_to_dataframe__parents_childfirst(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - obj = blk.list_children_by_class('SpikeTrain')[0] - res0 = ep.spiketrain_to_dataframe(obj) - res1 = ep.spiketrain_to_dataframe(obj, child_first=True) - res2 = ep.spiketrain_to_dataframe(obj, parents=True) - res3 = ep.spiketrain_to_dataframe(obj, parents=True, child_first=True) - - targvalues = pq.Quantity(obj.magnitude, units=obj.units) - targvalues = targvalues.rescale('s').magnitude[np.newaxis].T - targindex = np.arange(len(targvalues)) - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - self.assertEqual(1, len(res3.columns)) - - self.assertEqual(len(obj), len(res0.index)) - self.assertEqual(len(obj), len(res1.index)) - self.assertEqual(len(obj), len(res2.index)) - self.assertEqual(len(obj), len(res3.index)) - - assert_array_equal(targvalues, res0.values) - 
assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - assert_array_equal(targvalues, res3.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, res1.index) - assert_array_equal(targindex, res2.index) - assert_array_equal(targindex, res3.index) - - self.assertEqual(['spike_number'], res0.index.names) - self.assertEqual(['spike_number'], res1.index.names) - self.assertEqual(['spike_number'], res2.index.names) - self.assertEqual(['spike_number'], res3.index.names) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - self.assertEqual(keys, res3.columns.names) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res3.columns.levels): - assert_index_equal(value, level) - - def test__spiketrain_to_dataframe__parents_parentfirst(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - obj = blk.list_children_by_class('SpikeTrain')[0] - res0 = ep.spiketrain_to_dataframe(obj, child_first=False) - res1 = ep.spiketrain_to_dataframe(obj, parents=True, child_first=False) - - targvalues = pq.Quantity(obj.magnitude, units=obj.units) - targvalues = targvalues.rescale('s').magnitude[np.newaxis].T - targindex = np.arange(len(targvalues)) - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, - child_first=False) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, 
len(res1.columns)) - - self.assertEqual(len(obj), len(res0.index)) - self.assertEqual(len(obj), len(res1.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, res1.index) - - self.assertEqual(['spike_number'], res0.index.names) - self.assertEqual(['spike_number'], res1.index.names) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class EventToDataframeTestCase(unittest.TestCase): - def test__event_to_dataframe__parents_empty(self): - obj = random_event() - - res0 = ep.event_to_dataframe(obj) - res1 = ep.event_to_dataframe(obj, child_first=True) - res2 = ep.event_to_dataframe(obj, child_first=False) - res3 = ep.event_to_dataframe(obj, parents=True) - res4 = ep.event_to_dataframe(obj, parents=True, child_first=True) - res5 = ep.event_to_dataframe(obj, parents=True, child_first=False) - res6 = ep.event_to_dataframe(obj, parents=False) - res7 = ep.event_to_dataframe(obj, parents=False, child_first=True) - res8 = ep.event_to_dataframe(obj, parents=False, child_first=False) - - targvalues = obj.labels[:len(obj.times)][np.newaxis].T.astype('U') - targindex = obj.times[:len(obj.labels)].rescale('s').magnitude - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - self.assertEqual(1, len(res3.columns)) - self.assertEqual(1, len(res4.columns)) - self.assertEqual(1, len(res5.columns)) - self.assertEqual(1, len(res6.columns)) - self.assertEqual(1, 
len(res7.columns)) - self.assertEqual(1, len(res8.columns)) - - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res1.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res2.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res3.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res4.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res5.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res6.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res7.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res8.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - assert_array_equal(targvalues, res3.values) - assert_array_equal(targvalues, res4.values) - assert_array_equal(targvalues, res5.values) - assert_array_equal(targvalues, res6.values) - assert_array_equal(targvalues, res7.values) - assert_array_equal(targvalues, res8.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, res1.index) - assert_array_equal(targindex, res2.index) - assert_array_equal(targindex, res3.index) - assert_array_equal(targindex, res4.index) - assert_array_equal(targindex, res5.index) - assert_array_equal(targindex, res6.index) - assert_array_equal(targindex, res7.index) - assert_array_equal(targindex, res8.index) - - self.assertEqual(['times'], res0.index.names) - self.assertEqual(['times'], res1.index.names) - self.assertEqual(['times'], res2.index.names) - self.assertEqual(['times'], res3.index.names) - self.assertEqual(['times'], res4.index.names) - self.assertEqual(['times'], res5.index.names) - self.assertEqual(['times'], res6.index.names) - self.assertEqual(['times'], res7.index.names) - self.assertEqual(['times'], res8.index.names) - - 
self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - self.assertEqual(keys, res3.columns.names) - self.assertEqual(keys, res4.columns.names) - self.assertEqual(keys, res5.columns.names) - self.assertEqual(keys, res6.columns.names) - self.assertEqual(keys, res7.columns.names) - self.assertEqual(keys, res8.columns.names) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res3.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res4.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res5.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res6.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res7.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res8.columns.levels): - assert_index_equal(value, level) - - def test__event_to_dataframe__noparents(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('Event'): - objs.annotate(test=5) - obj = blk.list_children_by_class('Event')[0] - - res0 = ep.event_to_dataframe(obj, parents=False) - res1 = ep.event_to_dataframe(obj, parents=False, child_first=False) - res2 = ep.event_to_dataframe(obj, parents=False, child_first=True) - - targvalues = obj.labels[:len(obj.times)][np.newaxis].T.astype('U') - targindex = obj.times[:len(obj.labels)].rescale('s').magnitude - - attrs = ep._extract_neo_attrs_safe(obj, parents=False, - child_first=True) - 
keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res1.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res2.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, res1.index) - assert_array_equal(targindex, res2.index) - - self.assertEqual(['times'], res0.index.names) - self.assertEqual(['times'], res1.index.names) - self.assertEqual(['times'], res2.index.names) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - - def test__event_to_dataframe__parents_childfirst(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('Event'): - objs.annotate(test=5) - obj = blk.list_children_by_class('Event')[0] - - res0 = ep.event_to_dataframe(obj) - res1 = ep.event_to_dataframe(obj, child_first=True) - res2 = ep.event_to_dataframe(obj, parents=True) - res3 = ep.event_to_dataframe(obj, parents=True, child_first=True) - - targvalues = obj.labels[:len(obj.times)][np.newaxis].T.astype('U') - targindex = obj.times[:len(obj.labels)].rescale('s').magnitude - - attrs = 
ep._extract_neo_attrs_safe(obj, parents=True, child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - self.assertEqual(1, len(res3.columns)) - - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res1.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res2.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res3.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - assert_array_equal(targvalues, res3.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, res1.index) - assert_array_equal(targindex, res2.index) - assert_array_equal(targindex, res3.index) - - self.assertEqual(['times'], res0.index.names) - self.assertEqual(['times'], res1.index.names) - self.assertEqual(['times'], res2.index.names) - self.assertEqual(['times'], res3.index.names) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - self.assertEqual(keys, res3.columns.names) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res3.columns.levels): - assert_index_equal(value, level) - - def test__event_to_dataframe__parents_parentfirst(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in 
blk.list_children_by_class('Event'): - objs.annotate(test=5) - obj = blk.list_children_by_class('Event')[0] - res0 = ep.event_to_dataframe(obj, child_first=False) - res1 = ep.event_to_dataframe(obj, parents=True, child_first=False) - - targvalues = obj.labels[:len(obj.times)][np.newaxis].T.astype('U') - targindex = obj.times[:len(obj.labels)].rescale('s').magnitude - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, - child_first=False) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.labels)), - len(res1.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - - assert_array_equal(targindex, res0.index) - assert_array_equal(targindex, res1.index) - - self.assertEqual(['times'], res0.index.names) - self.assertEqual(['times'], res1.index.names) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class EpochToDataframeTestCase(unittest.TestCase): - def test__epoch_to_dataframe__parents_empty(self): - obj = random_epoch() - - res0 = ep.epoch_to_dataframe(obj) - res1 = ep.epoch_to_dataframe(obj, child_first=True) - res2 = ep.epoch_to_dataframe(obj, child_first=False) - res3 = ep.epoch_to_dataframe(obj, parents=True) - res4 = ep.epoch_to_dataframe(obj, parents=True, child_first=True) - res5 = ep.epoch_to_dataframe(obj, parents=True, child_first=False) - res6 = ep.epoch_to_dataframe(obj, parents=False) - res7 = ep.epoch_to_dataframe(obj, parents=False, child_first=True) - res8 = ep.epoch_to_dataframe(obj, 
parents=False, child_first=False) - - minlen = min([len(obj.times), len(obj.durations), len(obj.labels)]) - targvalues = obj.labels[:minlen][np.newaxis].T.astype('U') - targindex = np.vstack([obj.durations[:minlen].rescale('s').magnitude, - obj.times[:minlen].rescale('s').magnitude]) - targvalues = targvalues[targindex.argsort()[0], :] - targindex.sort() - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, - child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - self.assertEqual(1, len(res3.columns)) - self.assertEqual(1, len(res4.columns)) - self.assertEqual(1, len(res5.columns)) - self.assertEqual(1, len(res6.columns)) - self.assertEqual(1, len(res7.columns)) - self.assertEqual(1, len(res8.columns)) - - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res1.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res2.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res3.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res4.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res5.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res6.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res7.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res8.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - assert_array_equal(targvalues, res3.values) - assert_array_equal(targvalues, 
res4.values) - assert_array_equal(targvalues, res5.values) - assert_array_equal(targvalues, res6.values) - assert_array_equal(targvalues, res7.values) - assert_array_equal(targvalues, res8.values) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - self.assertEqual(keys, res3.columns.names) - self.assertEqual(keys, res4.columns.names) - self.assertEqual(keys, res5.columns.names) - self.assertEqual(keys, res6.columns.names) - self.assertEqual(keys, res7.columns.names) - self.assertEqual(keys, res8.columns.names) - - self.assertEqual([u'durations', u'times'], res0.index.names) - self.assertEqual([u'durations', u'times'], res1.index.names) - self.assertEqual([u'durations', u'times'], res2.index.names) - self.assertEqual([u'durations', u'times'], res3.index.names) - self.assertEqual([u'durations', u'times'], res4.index.names) - self.assertEqual([u'durations', u'times'], res5.index.names) - self.assertEqual([u'durations', u'times'], res6.index.names) - self.assertEqual([u'durations', u'times'], res7.index.names) - self.assertEqual([u'durations', u'times'], res8.index.names) - - self.assertEqual(2, len(res0.index.levels)) - self.assertEqual(2, len(res1.index.levels)) - self.assertEqual(2, len(res2.index.levels)) - self.assertEqual(2, len(res3.index.levels)) - self.assertEqual(2, len(res4.index.levels)) - self.assertEqual(2, len(res5.index.levels)) - self.assertEqual(2, len(res6.index.levels)) - self.assertEqual(2, len(res7.index.levels)) - self.assertEqual(2, len(res8.index.levels)) - - assert_array_equal(targindex, res0.index.levels) - assert_array_equal(targindex, res1.index.levels) - assert_array_equal(targindex, res2.index.levels) - assert_array_equal(targindex, res3.index.levels) - assert_array_equal(targindex, res4.index.levels) - assert_array_equal(targindex, res5.index.levels) - assert_array_equal(targindex, res6.index.levels) - assert_array_equal(targindex, res7.index.levels) 
- assert_array_equal(targindex, res8.index.levels) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res3.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res4.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res5.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res6.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res7.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res8.columns.levels): - assert_index_equal(value, level) - - def test__epoch_to_dataframe__noparents(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('Epoch'): - objs.annotate(test=5) - obj = blk.list_children_by_class('Epoch')[0] - - res0 = ep.epoch_to_dataframe(obj, parents=False) - res1 = ep.epoch_to_dataframe(obj, parents=False, child_first=True) - res2 = ep.epoch_to_dataframe(obj, parents=False, child_first=False) - - minlen = min([len(obj.times), len(obj.durations), len(obj.labels)]) - targvalues = obj.labels[:minlen][np.newaxis].T.astype('U') - targindex = np.vstack([obj.durations[:minlen].rescale('s').magnitude, - obj.times[:minlen].rescale('s').magnitude]) - targvalues = targvalues[targindex.argsort()[0], :] - targindex.sort() - - attrs = ep._extract_neo_attrs_safe(obj, parents=False, - child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - 
self.assertEqual(1, len(res2.columns)) - - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res1.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res2.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - - self.assertEqual([u'durations', u'times'], res0.index.names) - self.assertEqual([u'durations', u'times'], res1.index.names) - self.assertEqual([u'durations', u'times'], res2.index.names) - - self.assertEqual(2, len(res0.index.levels)) - self.assertEqual(2, len(res1.index.levels)) - self.assertEqual(2, len(res2.index.levels)) - - assert_array_equal(targindex, res0.index.levels) - assert_array_equal(targindex, res1.index.levels) - assert_array_equal(targindex, res2.index.levels) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - - def test__epoch_to_dataframe__parents_childfirst(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('Epoch'): - objs.annotate(test=5) - obj = blk.list_children_by_class('Epoch')[0] - - res0 = ep.epoch_to_dataframe(obj) - res1 = ep.epoch_to_dataframe(obj, child_first=True) - res2 = ep.epoch_to_dataframe(obj, parents=True) - res3 = ep.epoch_to_dataframe(obj, parents=True, child_first=True) - - minlen = min([len(obj.times), 
len(obj.durations), len(obj.labels)]) - targvalues = obj.labels[:minlen][np.newaxis].T.astype('U') - targindex = np.vstack([obj.durations[:minlen].rescale('s').magnitude, - obj.times[:minlen].rescale('s').magnitude]) - targvalues = targvalues[targindex.argsort()[0], :] - targindex.sort() - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, child_first=True) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - self.assertEqual(1, len(res2.columns)) - self.assertEqual(1, len(res3.columns)) - - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res1.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res2.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res3.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - assert_array_equal(targvalues, res2.values) - assert_array_equal(targvalues, res3.values) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - self.assertEqual(keys, res2.columns.names) - self.assertEqual(keys, res3.columns.names) - - self.assertEqual([u'durations', u'times'], res0.index.names) - self.assertEqual([u'durations', u'times'], res1.index.names) - self.assertEqual([u'durations', u'times'], res2.index.names) - self.assertEqual([u'durations', u'times'], res3.index.names) - - self.assertEqual(2, len(res0.index.levels)) - self.assertEqual(2, len(res1.index.levels)) - self.assertEqual(2, len(res2.index.levels)) - self.assertEqual(2, len(res3.index.levels)) - - assert_array_equal(targindex, res0.index.levels) - assert_array_equal(targindex, res1.index.levels) - assert_array_equal(targindex, res2.index.levels) - assert_array_equal(targindex, 
res3.index.levels) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res2.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res3.columns.levels): - assert_index_equal(value, level) - - def test__epoch_to_dataframe__parents_parentfirst(self): - blk = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in blk.list_children_by_class('Epoch'): - objs.annotate(test=5) - obj = blk.list_children_by_class('Epoch')[0] - - res0 = ep.epoch_to_dataframe(obj, child_first=False) - res1 = ep.epoch_to_dataframe(obj, parents=True, child_first=False) - - minlen = min([len(obj.times), len(obj.durations), len(obj.labels)]) - targvalues = obj.labels[:minlen][np.newaxis].T.astype('U') - targindex = np.vstack([obj.durations[:minlen].rescale('s').magnitude, - obj.times[:minlen].rescale('s').magnitude]) - targvalues = targvalues[targindex.argsort()[0], :] - targindex.sort() - - attrs = ep._extract_neo_attrs_safe(obj, parents=True, - child_first=False) - keys, values = zip(*sorted(attrs.items())) - values = _convert_levels(values) - - self.assertEqual(1, len(res0.columns)) - self.assertEqual(1, len(res1.columns)) - - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res0.index)) - self.assertEqual(min(len(obj.times), len(obj.durations), - len(obj.labels)), - len(res1.index)) - - assert_array_equal(targvalues, res0.values) - assert_array_equal(targvalues, res1.values) - - self.assertEqual(keys, res0.columns.names) - self.assertEqual(keys, res1.columns.names) - - self.assertEqual([u'durations', u'times'], res0.index.names) - self.assertEqual([u'durations', u'times'], res1.index.names) - - self.assertEqual(2, 
len(res0.index.levels)) - self.assertEqual(2, len(res1.index.levels)) - - assert_array_equal(targindex, res0.index.levels) - assert_array_equal(targindex, res1.index.levels) - - for value, level in zip(values, res0.columns.levels): - assert_index_equal(value, level) - for value, level in zip(values, res1.columns.levels): - assert_index_equal(value, level) - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class MultiSpiketrainsToDataframeTestCase(unittest.TestCase): - def setUp(self): - if hasattr(self, 'assertItemsEqual'): - self.assertCountEqual = self.assertItemsEqual - - def test__multi_spiketrains_to_dataframe__single(self): - obj = random_spiketrain() - - res0 = ep.multi_spiketrains_to_dataframe(obj) - res1 = ep.multi_spiketrains_to_dataframe(obj, parents=False) - res2 = ep.multi_spiketrains_to_dataframe(obj, parents=True) - res3 = ep.multi_spiketrains_to_dataframe(obj, child_first=True) - res4 = ep.multi_spiketrains_to_dataframe(obj, parents=False, - child_first=True) - res5 = ep.multi_spiketrains_to_dataframe(obj, parents=True, - child_first=True) - res6 = ep.multi_spiketrains_to_dataframe(obj, child_first=False) - res7 = ep.multi_spiketrains_to_dataframe(obj, parents=False, - child_first=False) - res8 = ep.multi_spiketrains_to_dataframe(obj, parents=True, - child_first=False) - - targ = ep.spiketrain_to_dataframe(obj) - - keys = ep._extract_neo_attrs_safe(obj, parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = 1 - targlen = len(obj) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - self.assertEqual(targwidth, len(res4.columns)) - self.assertEqual(targwidth, len(res5.columns)) - self.assertEqual(targwidth, len(res6.columns)) - self.assertEqual(targwidth, len(res7.columns)) - self.assertEqual(targwidth, len(res8.columns)) - - 
self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - self.assertEqual(targlen, len(res4.index)) - self.assertEqual(targlen, len(res5.index)) - self.assertEqual(targlen, len(res6.index)) - self.assertEqual(targlen, len(res7.index)) - self.assertEqual(targlen, len(res8.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - self.assertCountEqual(keys, res4.columns.names) - self.assertCountEqual(keys, res5.columns.names) - self.assertCountEqual(keys, res6.columns.names) - self.assertCountEqual(keys, res7.columns.names) - self.assertCountEqual(keys, res8.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - assert_array_equal(targ.values, res2.values) - assert_array_equal(targ.values, res3.values) - assert_array_equal(targ.values, res4.values) - assert_array_equal(targ.values, res5.values) - assert_array_equal(targ.values, res6.values) - assert_array_equal(targ.values, res7.values) - assert_array_equal(targ.values, res8.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - assert_frame_equal(targ, res4) - assert_frame_equal(targ, res5) - assert_frame_equal(targ, res6) - assert_frame_equal(targ, res7) - assert_frame_equal(targ, res8) - - def test__multi_spiketrains_to_dataframe__segment_default(self): - obj = generate_one_simple_segment( - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in 
obj.list_children_by_class('SpikeTrain'): - objs.annotate(test1=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj) - - objs = obj.spiketrains - - targ = [ep.spiketrain_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - assert_array_equal(targ.values, res0.values) - - assert_frame_equal(targ, res0) - - def test__multi_spiketrains_to_dataframe__block_noparents(self): - obj = generate_one_simple_block( - nb_segment=2, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('SpikeTrain'): - objs.annotate(test1=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj, parents=False) - res1 = ep.multi_spiketrains_to_dataframe(obj, parents=False, - child_first=True) - res2 = ep.multi_spiketrains_to_dataframe(obj, parents=False, - child_first=False) - - objs = obj.list_children_by_class('SpikeTrain') - - targ = [ep.spiketrain_to_dataframe(iobj, - parents=False, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=False, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) 
- self.assertEqual(targwidth, len(res2.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - assert_array_equal(targ.values, res2.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - - def test__multi_spiketrains_to_dataframe__block_parents_childfirst(self): - obj = generate_one_simple_block( - nb_segment=2, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('SpikeTrain'): - objs.annotate(test1=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj) - res1 = ep.multi_spiketrains_to_dataframe(obj, parents=True) - res2 = ep.multi_spiketrains_to_dataframe(obj, child_first=True) - res3 = ep.multi_spiketrains_to_dataframe(obj, parents=True, - child_first=True) - - objs = obj.list_children_by_class('SpikeTrain') - - targ = [ep.spiketrain_to_dataframe(iobj, - parents=True, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - - self.assertEqual(targlen, 
len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - assert_array_equal(targ.values, res2.values) - assert_array_equal(targ.values, res3.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - - def test__multi_spiketrains_to_dataframe__block_parents_parentfirst(self): - obj = generate_one_simple_block( - nb_segment=2, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('SpikeTrain'): - objs.annotate(test1=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj, child_first=False) - res1 = ep.multi_spiketrains_to_dataframe(obj, parents=True, - child_first=False) - - objs = obj.list_children_by_class('SpikeTrain') - - targ = [ep.spiketrain_to_dataframe(iobj, - parents=True, child_first=False) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=False).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - - 
self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - - def test__multi_spiketrains_to_dataframe__list_noparents(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj, parents=False) - res1 = ep.multi_spiketrains_to_dataframe(obj, parents=False, - child_first=True) - res2 = ep.multi_spiketrains_to_dataframe(obj, parents=False, - child_first=False) - - objs = (iobj.list_children_by_class('SpikeTrain') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.spiketrain_to_dataframe(iobj, - parents=False, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=False, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - - 
assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - assert_array_equal(targ.values, res2.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - - def test__multi_spiketrains_to_dataframe__list_parents_childfirst(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj) - res1 = ep.multi_spiketrains_to_dataframe(obj, parents=True) - res2 = ep.multi_spiketrains_to_dataframe(obj, child_first=True) - res3 = ep.multi_spiketrains_to_dataframe(obj, parents=True, - child_first=True) - - objs = (iobj.list_children_by_class('SpikeTrain') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.spiketrain_to_dataframe(iobj, - parents=True, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) 
- self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - assert_array_equal(targ.values, res2.values) - assert_array_equal(targ.values, res3.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - - def test__multi_spiketrains_to_dataframe__list_parents_parentfirst(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj, child_first=False) - res1 = ep.multi_spiketrains_to_dataframe(obj, parents=True, - child_first=False) - - objs = (iobj.list_children_by_class('SpikeTrain') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.spiketrain_to_dataframe(iobj, - parents=True, child_first=False) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=False).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - - 
assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - - def test__multi_spiketrains_to_dataframe__tuple_default(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj) - - objs = (iobj.list_children_by_class('SpikeTrain') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.spiketrain_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - assert_array_equal(targ.values, res0.values) - - assert_frame_equal(targ, res0) - - def test__multi_spiketrains_to_dataframe__iter_default(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - - res0 = ep.multi_spiketrains_to_dataframe(iter(obj)) - - objs = (iobj.list_children_by_class('SpikeTrain') for iobj in obj) - objs = list(chain.from_iterable(objs)) - targ = [ep.spiketrain_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = 
ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - assert_array_equal(targ.values, res0.values) - - assert_frame_equal(targ, res0) - - def test__multi_spiketrains_to_dataframe__dict_default(self): - obj = dict( - ( - i, - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event])) - for i in range(3)) - for iobj in obj: - for objs in obj[iobj].list_children_by_class('SpikeTrain'): - objs.annotate(test=5) - - res0 = ep.multi_spiketrains_to_dataframe(obj) - - objs = (iobj.list_children_by_class('SpikeTrain') for iobj in - obj.values()) - objs = list(chain.from_iterable(objs)) - targ = [ep.spiketrain_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = max(len(iobj) for iobj in objs) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - assert_array_equal(targ.values, res0.values) - - assert_frame_equal(targ, res0) - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class MultiEventsToDataframeTestCase(unittest.TestCase): - def setUp(self): 
- if hasattr(self, 'assertItemsEqual'): - self.assertCountEqual = self.assertItemsEqual - - def test__multi_events_to_dataframe__single(self): - obj = random_event() - - res0 = ep.multi_events_to_dataframe(obj) - res1 = ep.multi_events_to_dataframe(obj, parents=False) - res2 = ep.multi_events_to_dataframe(obj, parents=True) - res3 = ep.multi_events_to_dataframe(obj, child_first=True) - res4 = ep.multi_events_to_dataframe(obj, parents=False, - child_first=True) - res5 = ep.multi_events_to_dataframe(obj, parents=True, - child_first=True) - res6 = ep.multi_events_to_dataframe(obj, child_first=False) - res7 = ep.multi_events_to_dataframe(obj, parents=False, - child_first=False) - res8 = ep.multi_events_to_dataframe(obj, parents=True, - child_first=False) - - targ = ep.event_to_dataframe(obj) - - keys = ep._extract_neo_attrs_safe(obj, parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = 1 - targlen = min(len(obj.times), len(obj.labels)) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - self.assertEqual(targwidth, len(res4.columns)) - self.assertEqual(targwidth, len(res5.columns)) - self.assertEqual(targwidth, len(res6.columns)) - self.assertEqual(targwidth, len(res7.columns)) - self.assertEqual(targwidth, len(res8.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - self.assertEqual(targlen, len(res4.index)) - self.assertEqual(targlen, len(res5.index)) - self.assertEqual(targlen, len(res6.index)) - self.assertEqual(targlen, len(res7.index)) - self.assertEqual(targlen, len(res8.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, 
res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - self.assertCountEqual(keys, res4.columns.names) - self.assertCountEqual(keys, res5.columns.names) - self.assertCountEqual(keys, res6.columns.names) - self.assertCountEqual(keys, res7.columns.names) - self.assertCountEqual(keys, res8.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - assert_array_equal(targ.values, res2.values) - assert_array_equal(targ.values, res3.values) - assert_array_equal(targ.values, res4.values) - assert_array_equal(targ.values, res5.values) - assert_array_equal(targ.values, res6.values) - assert_array_equal(targ.values, res7.values) - assert_array_equal(targ.values, res8.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - assert_frame_equal(targ, res4) - assert_frame_equal(targ, res5) - assert_frame_equal(targ, res6) - assert_frame_equal(targ, res7) - assert_frame_equal(targ, res8) - - def test__multi_events_to_dataframe__segment_default(self): - obj = generate_one_simple_segment( - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Event'): - objs.annotate(test1=5) - - res0 = ep.multi_events_to_dataframe(obj) - - objs = obj.events - - targ = [ep.event_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - 
self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - def test__multi_events_to_dataframe__block_noparents(self): - obj = generate_one_simple_block( - nb_segment=2, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Event'): - objs.annotate(test1=5) - - res0 = ep.multi_events_to_dataframe(obj, parents=False) - res1 = ep.multi_events_to_dataframe(obj, parents=False, - child_first=True) - res2 = ep.multi_events_to_dataframe(obj, parents=False, - child_first=False) - - objs = obj.list_children_by_class('Event') - - targ = [ep.event_to_dataframe(iobj, parents=False, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=False, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, 
res2.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - - def test__multi_events_to_dataframe__block_parents_childfirst(self): - obj = generate_one_simple_block( - nb_segment=2, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Event'): - objs.annotate(test1=5) - - res0 = ep.multi_events_to_dataframe(obj) - res1 = ep.multi_events_to_dataframe(obj, parents=True) - res2 = ep.multi_events_to_dataframe(obj, child_first=True) - res3 = ep.multi_events_to_dataframe(obj, parents=True, - child_first=True) - - objs = obj.list_children_by_class('Event') - - targ = [ep.event_to_dataframe(iobj, parents=True, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, 
len(res3.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res3.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - - def test__multi_events_to_dataframe__block_parents_parentfirst(self): - obj = generate_one_simple_block( - nb_segment=2, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Event'): - objs.annotate(test1=5) - - res0 = ep.multi_events_to_dataframe(obj, child_first=False) - res1 = ep.multi_events_to_dataframe(obj, parents=True, - child_first=False) - - objs = obj.list_children_by_class('Event') - - targ = [ep.event_to_dataframe(iobj, parents=True, child_first=False) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=False).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - - 
self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - - def test__multi_events_to_dataframe__list_noparents(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Event'): - objs.annotate(test=5) - - res0 = ep.multi_events_to_dataframe(obj, parents=False) - res1 = ep.multi_events_to_dataframe(obj, parents=False, - child_first=True) - res2 = ep.multi_events_to_dataframe(obj, parents=False, - child_first=False) - - objs = (iobj.list_children_by_class('Event') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.event_to_dataframe(iobj, parents=False, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=False, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, 
len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - - def test__multi_events_to_dataframe__list_parents_childfirst(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Event'): - objs.annotate(test=5) - - res0 = ep.multi_events_to_dataframe(obj) - res1 = ep.multi_events_to_dataframe(obj, parents=True) - res2 = ep.multi_events_to_dataframe(obj, child_first=True) - res3 = ep.multi_events_to_dataframe(obj, parents=True, - child_first=True) - - objs = (iobj.list_children_by_class('Event') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.event_to_dataframe(iobj, parents=True, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - 
self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res3.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - - def test__multi_events_to_dataframe__list_parents_parentfirst(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Event'): - objs.annotate(test=5) - - res0 = ep.multi_events_to_dataframe(obj, child_first=False) - res1 = ep.multi_events_to_dataframe(obj, parents=True, - child_first=False) - - objs = (iobj.list_children_by_class('Event') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.event_to_dataframe(iobj, parents=True, child_first=False) - for iobj in objs] - targ = 
ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=False).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - - def test__multi_events_to_dataframe__tuple_default(self): - obj = tuple([ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)]) - for iobj in obj: - for objs in iobj.list_children_by_class('Event'): - objs.annotate(test=5) - - res0 = ep.multi_events_to_dataframe(obj) - - objs = (iobj.list_children_by_class('Event') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.event_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - 
self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - def test__multi_events_to_dataframe__iter_default(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Event'): - objs.annotate(test=5) - - res0 = ep.multi_events_to_dataframe(iter(obj)) - - objs = (iobj.list_children_by_class('Event') for iobj in obj) - objs = list(chain.from_iterable(objs)) - targ = [ep.event_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - def test__multi_events_to_dataframe__dict_default(self): - obj = dict( - ( - i, - generate_one_simple_block( - nb_segment=1, - 
supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event])) - for i in range(3)) - for iobj in obj: - for objs in obj[iobj].list_children_by_class('Event'): - objs.annotate(test=5) - - res0 = ep.multi_events_to_dataframe(obj) - - objs = (iobj.list_children_by_class('Event') for iobj in - obj.values()) - objs = list(chain.from_iterable(objs)) - targ = [ep.event_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.labels))] - for iobj in objs] - targlen = len(np.unique(np.hstack(targlen))) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class MultiEpochsToDataframeTestCase(unittest.TestCase): - def setUp(self): - if hasattr(self, 'assertItemsEqual'): - self.assertCountEqual = self.assertItemsEqual - - def test__multi_epochs_to_dataframe__single(self): - obj = random_epoch() - - res0 = ep.multi_epochs_to_dataframe(obj) - res1 = ep.multi_epochs_to_dataframe(obj, parents=False) - res2 = ep.multi_epochs_to_dataframe(obj, parents=True) - res3 = ep.multi_epochs_to_dataframe(obj, child_first=True) - res4 = ep.multi_epochs_to_dataframe(obj, parents=False, - child_first=True) - res5 = ep.multi_epochs_to_dataframe(obj, parents=True, - child_first=True) - res6 = ep.multi_epochs_to_dataframe(obj, 
child_first=False) - res7 = ep.multi_epochs_to_dataframe(obj, parents=False, - child_first=False) - res8 = ep.multi_epochs_to_dataframe(obj, parents=True, - child_first=False) - - targ = ep.epoch_to_dataframe(obj) - - keys = ep._extract_neo_attrs_safe(obj, parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = 1 - targlen = min(len(obj.times), len(obj.durations), len(obj.labels)) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - self.assertEqual(targwidth, len(res4.columns)) - self.assertEqual(targwidth, len(res5.columns)) - self.assertEqual(targwidth, len(res6.columns)) - self.assertEqual(targwidth, len(res7.columns)) - self.assertEqual(targwidth, len(res8.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - self.assertEqual(targlen, len(res4.index)) - self.assertEqual(targlen, len(res5.index)) - self.assertEqual(targlen, len(res6.index)) - self.assertEqual(targlen, len(res7.index)) - self.assertEqual(targlen, len(res8.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - self.assertCountEqual(keys, res4.columns.names) - self.assertCountEqual(keys, res5.columns.names) - self.assertCountEqual(keys, res6.columns.names) - self.assertCountEqual(keys, res7.columns.names) - self.assertCountEqual(keys, res8.columns.names) - - assert_array_equal(targ.values, res0.values) - assert_array_equal(targ.values, res1.values) - assert_array_equal(targ.values, res2.values) - 
assert_array_equal(targ.values, res3.values) - assert_array_equal(targ.values, res4.values) - assert_array_equal(targ.values, res5.values) - assert_array_equal(targ.values, res6.values) - assert_array_equal(targ.values, res7.values) - assert_array_equal(targ.values, res8.values) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - assert_frame_equal(targ, res4) - assert_frame_equal(targ, res5) - assert_frame_equal(targ, res6) - assert_frame_equal(targ, res7) - assert_frame_equal(targ, res8) - - def test__multi_epochs_to_dataframe__segment_default(self): - obj = generate_one_simple_segment( - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Epoch'): - objs.annotate(test1=5) - - res0 = ep.multi_epochs_to_dataframe(obj) - - objs = obj.epochs - - targ = [ep.epoch_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - def test__multi_epochs_to_dataframe__block_noparents(self): - obj = generate_one_simple_block( - nb_segment=2, - supported_objects=[ - neo.core.Block, 
neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Epoch'): - objs.annotate(test1=5) - - res0 = ep.multi_epochs_to_dataframe(obj, parents=False) - res1 = ep.multi_epochs_to_dataframe(obj, parents=False, - child_first=True) - res2 = ep.multi_epochs_to_dataframe(obj, parents=False, - child_first=False) - - objs = obj.list_children_by_class('Epoch') - - targ = [ep.epoch_to_dataframe(iobj, parents=False, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=False, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - - def test__multi_epochs_to_dataframe__block_parents_childfirst(self): - obj = 
generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(obj) - res1 = ep.multi_epochs_to_dataframe(obj, parents=True) - res2 = ep.multi_epochs_to_dataframe(obj, child_first=True) - res3 = ep.multi_epochs_to_dataframe(obj, parents=True, - child_first=True) - - objs = obj.list_children_by_class('Epoch') - - targ = [ep.epoch_to_dataframe(iobj, parents=True, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # 
np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res3.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - - def test__multi_epochs_to_dataframe__block_parents_parentfirst(self): - obj = generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for objs in obj.list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(obj, child_first=False) - res1 = ep.multi_epochs_to_dataframe(obj, parents=True, - child_first=False) - - objs = obj.list_children_by_class('Epoch') - - targ = [ep.epoch_to_dataframe(iobj, parents=True, child_first=False) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=False).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - - 
assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - - def test__multi_epochs_to_dataframe__list_noparents(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(obj, parents=False) - res1 = ep.multi_epochs_to_dataframe(obj, parents=False, - child_first=True) - res2 = ep.multi_epochs_to_dataframe(obj, parents=False, - child_first=False) - - objs = (iobj.list_children_by_class('Epoch') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.epoch_to_dataframe(iobj, parents=False, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=False, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # 
np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - - def test__multi_epochs_to_dataframe__list_parents_childfirst(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(obj) - res1 = ep.multi_epochs_to_dataframe(obj, parents=True) - res2 = ep.multi_epochs_to_dataframe(obj, child_first=True) - res3 = ep.multi_epochs_to_dataframe(obj, parents=True, - child_first=True) - - objs = (iobj.list_children_by_class('Epoch') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.epoch_to_dataframe(iobj, parents=True, child_first=True) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - self.assertEqual(targwidth, len(res2.columns)) - self.assertEqual(targwidth, len(res3.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - self.assertEqual(targlen, len(res2.index)) - self.assertEqual(targlen, len(res3.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, 
res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - self.assertCountEqual(keys, res2.columns.names) - self.assertCountEqual(keys, res3.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res2.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res3.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - - def test__multi_epochs_to_dataframe__list_parents_parentfirst(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(obj, child_first=False) - res1 = ep.multi_epochs_to_dataframe(obj, parents=True, - child_first=False) - - objs = (iobj.list_children_by_class('Epoch') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.epoch_to_dataframe(iobj, parents=True, child_first=False) - for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=False).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - self.assertEqual(targwidth, len(res1.columns)) - - 
self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - self.assertEqual(targlen, len(res1.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - self.assertCountEqual(keys, res1.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res1.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - - def test__multi_epochs_to_dataframe__tuple_default(self): - obj = tuple([ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)]) - for iobj in obj: - for objs in iobj.list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(obj) - - objs = (iobj.list_children_by_class('Epoch') for iobj in obj) - objs = list(chain.from_iterable(objs)) - - targ = [ep.epoch_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - def 
test__multi_epochs_to_dataframe__iter_default(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(3)] - for iobj in obj: - for objs in iobj.list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(iter(obj)) - - objs = (iobj.list_children_by_class('Epoch') for iobj in obj) - objs = list(chain.from_iterable(objs)) - targ = [ep.epoch_to_dataframe(iobj) for iobj in objs] - targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - def test__multi_epochs_to_dataframe__dict_default(self): - obj = dict( - ( - i, - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event])) - for i in range(3)) - for iobj in obj: - for objs in obj[iobj].list_children_by_class('Epoch'): - objs.annotate(test=5) - - res0 = ep.multi_epochs_to_dataframe(obj) - - objs = (iobj.list_children_by_class('Epoch') for iobj in - obj.values()) - objs = list(chain.from_iterable(objs)) - targ = [ep.epoch_to_dataframe(iobj) for iobj in objs] - 
targ = ep._sort_inds(pd.concat(targ, axis=1), axis=1) - - keys = ep._extract_neo_attrs_safe(objs[0], parents=True, - child_first=True).keys() - keys = list(keys) - - targwidth = len(objs) - targlen = [iobj.times[:min(len(iobj.times), len(iobj.durations), - len(iobj.labels))] for iobj in objs] - targlen = len(np.hstack(targlen)) - - self.assertGreater(len(objs), 0) - - self.assertEqual(targwidth, len(targ.columns)) - self.assertEqual(targwidth, len(res0.columns)) - - self.assertEqual(targlen, len(targ.index)) - self.assertEqual(targlen, len(res0.index)) - - self.assertCountEqual(keys, targ.columns.names) - self.assertCountEqual(keys, res0.columns.names) - - # assert_array_equal( - # np.array(targ.values, dtype=np.float), - # np.array(res0.values, dtype=np.float)) - - assert_frame_equal(targ, res0) - - -@unittest.skipUnless(HAVE_PANDAS, 'requires pandas') -class SliceSpiketrainTestCase(unittest.TestCase): - def setUp(self): - obj = [ - generate_one_simple_block( - nb_segment=1, - supported_objects=[ - neo.core.Block, neo.core.Segment, - neo.core.SpikeTrain, neo.core.AnalogSignal, - neo.core.Epoch, neo.core.Event]) - for _ in range(10)] - self.obj = ep.multi_spiketrains_to_dataframe(obj) - - def test_single_none(self): - targ_start = self.obj.columns.get_level_values('t_start').values - targ_stop = self.obj.columns.get_level_values('t_stop').values - - res0 = ep.slice_spiketrain(self.obj) - res1 = ep.slice_spiketrain(self.obj, t_start=None) - res2 = ep.slice_spiketrain(self.obj, t_stop=None) - res3 = ep.slice_spiketrain(self.obj, t_start=None, t_stop=None) - - res0_start = res0.columns.get_level_values('t_start').values - res1_start = res1.columns.get_level_values('t_start').values - res2_start = res2.columns.get_level_values('t_start').values - res3_start = res3.columns.get_level_values('t_start').values - - res0_stop = res0.columns.get_level_values('t_stop').values - res1_stop = res1.columns.get_level_values('t_stop').values - res2_stop = 
res2.columns.get_level_values('t_stop').values - res3_stop = res3.columns.get_level_values('t_stop').values - targ = self.obj - - self.assertFalse(res0 is targ) - self.assertFalse(res1 is targ) - self.assertFalse(res2 is targ) - self.assertFalse(res3 is targ) - - assert_frame_equal(targ, res0) - assert_frame_equal(targ, res1) - assert_frame_equal(targ, res2) - assert_frame_equal(targ, res3) - - assert_array_equal(targ_start, res0_start) - assert_array_equal(targ_start, res1_start) - assert_array_equal(targ_start, res2_start) - assert_array_equal(targ_start, res3_start) - - assert_array_equal(targ_stop, res0_stop) - assert_array_equal(targ_stop, res1_stop) - assert_array_equal(targ_stop, res2_stop) - assert_array_equal(targ_stop, res3_stop) - - def test_single_t_start(self): - targ_start = .0001 - targ_stop = self.obj.columns.get_level_values('t_stop').values - - res0 = ep.slice_spiketrain(self.obj, t_start=targ_start) - res1 = ep.slice_spiketrain(self.obj, t_start=targ_start, t_stop=None) - - res0_start = res0.columns.get_level_values('t_start').unique().tolist() - res1_start = res1.columns.get_level_values('t_start').unique().tolist() - - res0_stop = res0.columns.get_level_values('t_stop').values - res1_stop = res1.columns.get_level_values('t_stop').values - - targ = self.obj.values - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - # targ already has nan values, ignore comparing with nan - targ[targ < targ_start] = np.nan - - self.assertFalse(res0 is targ) - self.assertFalse(res1 is targ) - - assert_array_equal(targ, res0.values) - assert_array_equal(targ, res1.values) - - self.assertEqual([targ_start], res0_start) - self.assertEqual([targ_start], res1_start) - - assert_array_equal(targ_stop, res0_stop) - assert_array_equal(targ_stop, res1_stop) - - def test_single_t_stop(self): - targ_start = self.obj.columns.get_level_values('t_start').values - targ_stop = .0009 - - res0 = ep.slice_spiketrain(self.obj, t_stop=targ_stop) - res1 = 
ep.slice_spiketrain(self.obj, t_stop=targ_stop, t_start=None) - - res0_start = res0.columns.get_level_values('t_start').values - res1_start = res1.columns.get_level_values('t_start').values - - res0_stop = res0.columns.get_level_values('t_stop').unique().tolist() - res1_stop = res1.columns.get_level_values('t_stop').unique().tolist() - - targ = self.obj.values - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - # targ already has nan values, ignore comparing with nan - targ[targ > targ_stop] = np.nan - - self.assertFalse(res0 is targ) - self.assertFalse(res1 is targ) - - assert_array_equal(targ, res0.values) - assert_array_equal(targ, res1.values) - - assert_array_equal(targ_start, res0_start) - assert_array_equal(targ_start, res1_start) - - self.assertEqual([targ_stop], res0_stop) - self.assertEqual([targ_stop], res1_stop) - - def test_single_both(self): - targ_start = .0001 - targ_stop = .0009 - - res0 = ep.slice_spiketrain(self.obj, - t_stop=targ_stop, t_start=targ_start) - - res0_start = res0.columns.get_level_values('t_start').unique().tolist() - - res0_stop = res0.columns.get_level_values('t_stop').unique().tolist() - - targ = self.obj.values - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - # targ already has nan values, ignore comparing with nan - targ[targ < targ_start] = np.nan - targ[targ > targ_stop] = np.nan - - self.assertFalse(res0 is targ) - - assert_array_equal(targ, res0.values) - - self.assertEqual([targ_start], res0_start) - - self.assertEqual([targ_stop], res0_stop) - - -if __name__ == '__main__': - unittest.main() diff --git a/requirements/environment-docs.yml b/requirements/environment-docs.yml index 008e99d1c..3caaedc8b 100644 --- a/requirements/environment-docs.yml +++ b/requirements/environment-docs.yml @@ -10,7 +10,6 @@ dependencies: - numpy>=1.19.5 - scipy - tqdm - - pandas - scikit-learn - statsmodels - jinja2 diff --git a/requirements/environment-tests.yml b/requirements/environment-tests.yml index 
36a47860d..508ed1030 100644 --- a/requirements/environment-tests.yml +++ b/requirements/environment-tests.yml @@ -10,7 +10,6 @@ dependencies: - numpy>=1.19.5 - scipy - tqdm - - pandas - scikit-learn - statsmodels - jinja2 diff --git a/requirements/environment.yml b/requirements/environment.yml index 8a03968ef..2ec822c27 100644 --- a/requirements/environment.yml +++ b/requirements/environment.yml @@ -9,7 +9,6 @@ dependencies: - numpy>=1.19.5 - scipy - tqdm - - pandas - scikit-learn - statsmodels - jinja2 diff --git a/requirements/requirements-extras.txt b/requirements/requirements-extras.txt index c43293b3e..5f4f65a9b 100644 --- a/requirements/requirements-extras.txt +++ b/requirements/requirements-extras.txt @@ -1,4 +1,3 @@ -pandas>=0.18.0 scikit-learn>=0.23.2 statsmodels>=0.12.1 jinja2>=2.11.2 # required for ASSET CUDA From 2fa63bccd495b051654120ab2adda6cb7fcb8ae4 Mon Sep 17 00:00:00 2001 From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com> Date: Fri, 10 Nov 2023 18:12:15 +0100 Subject: [PATCH 2/4] [Main] remove deprecated features (#488) * removed deprecated aliases from unitary_event_analysis.py * updated unitary_event_analysis.ipynb accordingly * removed deprecated aliases from cell_assembly_detection.py * removed deprecated aliases from change_point_detection.py * removed deprecated aliases from conversion.py * stationary poisson process from parallel * fixed statistics.py, changed binsize to bin_size * removed deprecated aliases from cubic.py * removed deprecated aliases from current_source_density.py, refactored test_csd to test_current_source_density * refactor dictionary creation * removed deprecated aliases from gpfa.py, refactored test_gpfa.py to use StationaryPoissonProcess * remove homogeneous_poisson_process from spike_train_generation.py * removed deprecated aliases from gpfa_util.py * removed deprecated aliases from kernels.py * removed deprecated aliases from neo_tools.py * removed deprecated aliases from signal_processing.py 
* removed deprecated aliases from spade.py * removed deprecated aliases from spectral.py * removed deprecated aliases from spike_train_correlation.py * removed deprecated aliases from spike_train_dissimilarity.py * removed deprecated aliases from spike_train_generation.py * update keyword argument in spade tutorial * fix example code for cubic * fix example code spike_train_generation * fix example code gpfa * removed deprecated aliases from unitary_event_analysis.py * updated unitary_event_analysis.ipynb accordingly * removed deprecated aliases from change_point_detection.py * removed deprecated aliases from conversion.py * stationary poisson process from parallel * fixed statistics.py, changed binsize to bin_size * removed deprecated aliases from cubic.py * removed deprecated aliases from current_source_density.py, refactored test_csd to test_current_source_density * refactor dictionary creation * removed deprecated aliases from gpfa.py, refactored test_gpfa.py to use StationaryPoissonProcess * remove homogeneous_poisson_process from spike_train_generation.py * removed deprecated aliases from gpfa_util.py * removed deprecated aliases from kernels.py * removed deprecated aliases from neo_tools.py * removed deprecated aliases from signal_processing.py * removed deprecated aliases from spectral.py * removed deprecated aliases from spike_train_dissimilarity.py * removed deprecated aliases from spike_train_generation.py * update keyword argument in spade tutorial * fix example code for cubic * fix example code spike_train_generation * add missing import statement for check_neo_consistency * fix imports * fix pep8 * remove "TODO" from statistics tutorial notebook --- doc/tutorials/spade.ipynb | 6 +- doc/tutorials/statistics.ipynb | 21 -- doc/tutorials/unitary_event_analysis.ipynb | 5 +- elephant/cell_assembly_detection.py | 21 +- elephant/change_point_detection.py | 4 - elephant/conversion.py | 42 +--- elephant/cubic.py | 9 +- elephant/current_source_density.py | 5 - 
elephant/gpfa/gpfa.py | 18 +- elephant/gpfa/gpfa_util.py | 2 - elephant/kernels.py | 12 - elephant/neo_tools.py | 5 +- elephant/signal_processing.py | 11 +- elephant/spade.py | 2 +- elephant/spectral.py | 6 - elephant/spike_train_correlation.py | 35 ++- elephant/spike_train_dissimilarity.py | 17 -- elephant/spike_train_generation.py | 98 +++----- elephant/statistics.py | 2 +- elephant/test/test_current_source_density.py | 38 +-- elephant/test/test_gpfa.py | 5 +- elephant/test/test_parallel.py | 6 +- elephant/test/test_signal_processing.py | 27 ++- elephant/test/test_spectral.py | 26 ++- elephant/test/test_spike_train_correlation.py | 13 +- .../test/test_spike_train_dissimilarity.py | 16 +- elephant/test/test_spike_train_generation.py | 216 +++++++++--------- elephant/unitary_event_analysis.py | 5 +- 28 files changed, 260 insertions(+), 413 deletions(-) diff --git a/doc/tutorials/spade.ipynb b/doc/tutorials/spade.ipynb index 951220031..1c00c2547 100644 --- a/doc/tutorials/spade.ipynb +++ b/doc/tutorials/spade.ipynb @@ -52,7 +52,7 @@ "outputs": [], "source": [ "spiketrains = elephant.spike_train_generation.compound_poisson_process(\n", - " rate=5*pq.Hz, A=[0]+[0.98]+[0]*8+[0.02], t_stop=10*pq.s)\n", + " rate=5*pq.Hz, amplitude_distribution=[0]+[0.98]+[0]*8+[0.02], t_stop=10*pq.s)\n", "len(spiketrains)" ] }, @@ -152,7 +152,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "New_Filter", "language": "python", "name": "python3" }, @@ -166,7 +166,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.10.4" }, "latex_envs": { "LaTeX_envs_menu_present": true, diff --git a/doc/tutorials/statistics.ipynb b/doc/tutorials/statistics.ipynb index 5196e8860..46b08e43e 100644 --- a/doc/tutorials/statistics.ipynb +++ b/doc/tutorials/statistics.ipynb @@ -520,27 +520,6 @@ "source": [ "As predicted by theory, the CV values are clustered around 1." 
] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# TODO" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Spike interval statistics" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Statistics across spike trains" - ] } ], "metadata": { diff --git a/doc/tutorials/unitary_event_analysis.ipynb b/doc/tutorials/unitary_event_analysis.ipynb index 367f461e5..94d8d82d8 100644 --- a/doc/tutorials/unitary_event_analysis.ipynb +++ b/doc/tutorials/unitary_event_analysis.ipynb @@ -495,7 +495,7 @@ "outputs": [], "source": [ "UE = ue.jointJ_window_analysis(\n", - " spiketrains, bin_size=5*pq.ms, winsize=100*pq.ms, winstep=10*pq.ms, pattern_hash=[3])\n", + " spiketrains, bin_size=5*pq.ms, win_size=100*pq.ms, win_step=10*pq.ms, pattern_hash=[3])\n", "\n", "plot_ue(spiketrains, UE, significance_level=0.05)\n", "plt.show()" @@ -503,6 +503,9 @@ } ], "metadata": { + "interpreter": { + "hash": "623e048a0474aa032839f97d38ba0837cc9041adc49a14b480c72f2df8ea99e3" + }, "kernelspec": { "display_name": "inm-elephant", "language": "python", diff --git a/elephant/cell_assembly_detection.py b/elephant/cell_assembly_detection.py index 265d18f8e..bab3ef47b 100644 --- a/elephant/cell_assembly_detection.py +++ b/elephant/cell_assembly_detection.py @@ -71,28 +71,23 @@ import copy import math import time -import warnings import numpy as np from scipy.stats import f import elephant.conversion as conv -from elephant.utils import deprecated_alias __all__ = [ "cell_assembly_detection" ] -@deprecated_alias(data='binned_spiketrain', maxlag='max_lag', - min_occ='min_occurrences', - same_config_cut='same_configuration_pruning') def cell_assembly_detection(binned_spiketrain, max_lag, reference_lag=2, alpha=0.05, min_occurrences=1, size_chunks=100, max_spikes=np.inf, significance_pruning=True, subgroup_pruning=True, same_configuration_pruning=False, - bool_times_format=None, verbose=False): + verbose=False): """ Perform the CAD 
analysis :cite:`cad-Russo2017_e19428` for the binned (discretized) spike trains given in the input. The method looks for @@ -156,14 +151,6 @@ def cell_assembly_detection(binned_spiketrain, max_lag, reference_lag=2, efficient), not testing assemblies already formed if they appear in the very same configuration. Default: False - bool_times_format : bool, optional - - .. deprecated:: 0.10.0 - - Has no effect, the returning 'times' are always a quantity array - specifying the pattern spike times. - Default: None - verbose : bool, optional Regulates the number of prints given by the method. If true all prints are given, otherwise the method does give any prints. @@ -216,12 +203,6 @@ def cell_assembly_detection(binned_spiketrain, max_lag, reference_lag=2, size_chunks=size_chunks, max_spikes=max_spikes) - if bool_times_format is not None: - warnings.warn("'bool_times_format' is deprecated and has no effect; " - "the returning 'times' are always a quantity array " - "specifying the pattern spike times. 
Set this parameter " - "to None.", DeprecationWarning) - bin_size = binned_spiketrain.bin_size t_start = binned_spiketrain.t_start diff --git a/elephant/change_point_detection.py b/elephant/change_point_detection.py index 1c4f724d2..d42b30a66 100644 --- a/elephant/change_point_detection.py +++ b/elephant/change_point_detection.py @@ -45,15 +45,12 @@ import numpy as np import quantities as pq -from elephant.utils import deprecated_alias - __all__ = [ "multiple_filter_test", "empirical_parameters" ] -@deprecated_alias(dt='time_step') def multiple_filter_test(window_sizes, spiketrain, t_final, alpha, n_surrogates=1000, test_quantile=None, test_param=None, time_step=None): @@ -253,7 +250,6 @@ def _limit_processes(window_sizes, t_final, time_step): return limit_processes -@deprecated_alias(dt='time_step') def empirical_parameters(window_sizes, t_final, alpha, n_surrogates=1000, time_step=None): r""" diff --git a/elephant/conversion.py b/elephant/conversion.py index 6c8ae41d9..276da8ec3 100644 --- a/elephant/conversion.py +++ b/elephant/conversion.py @@ -84,7 +84,7 @@ import quantities as pq import scipy.sparse as sps -from elephant.utils import is_binary, deprecated_alias, is_time_quantity, \ +from elephant.utils import is_binary, is_time_quantity, \ check_neo_consistency, round_binning_errors __all__ = [ @@ -334,7 +334,6 @@ class BinnedSpikeTrain(object): """ - @deprecated_alias(binsize='bin_size', num_bins='n_bins') def __init__(self, spiketrains, bin_size=None, n_bins=None, t_start=None, t_stop=None, tolerance=1e-8, sparse_format="csr"): if sparse_format not in ("csr", "csc"): @@ -387,24 +386,6 @@ def t_stop(self): """ return pq.Quantity(self._t_stop, units=self.units, copy=False) - @property - def binsize(self): - """ - Deprecated in favor of :attr:`bin_size`. - """ - warnings.warn("'.binsize' is deprecated; use '.bin_size'", - DeprecationWarning) - return self._bin_size - - @property - def num_bins(self): - """ - Deprecated in favor of :attr:`n_bins`. 
- """ - warnings.warn("'.num_bins' is deprecated; use '.n_bins'", - DeprecationWarning) - return self.n_bins - def __repr__(self): return f"{type(self).__name__}(t_start={str(self.t_start)}, " \ f"t_stop={str(self.t_stop)}, bin_size={str(self.bin_size)}; " \ @@ -619,27 +600,6 @@ def bin_centers(self): bin_centers = pq.Quantity(bin_centers, units=self.units, copy=False) return bin_centers - def to_sparse_array(self): - """ - Getter for sparse matrix with time points. Deprecated in favor of - :attr:`sparse_matrix`. - - Returns - ------- - scipy.sparse.csr_matrix or scipy.sparse.csc_matrix - Sparse matrix, version with spike counts. - - See also - -------- - scipy.sparse.csr_matrix - to_array - - """ - warnings.warn("'.to_sparse_array()' function is deprecated; " - "use '.sparse_matrix' attribute directly", - DeprecationWarning) - return self.sparse_matrix - def to_sparse_bool_array(self): """ Getter for boolean version of the sparse matrix, calculated from diff --git a/elephant/cubic.py b/elephant/cubic.py index 513baa389..baf5a70b5 100644 --- a/elephant/cubic.py +++ b/elephant/cubic.py @@ -22,11 +22,11 @@ >>> import quantities as pq >>> from elephant import statistics >>> from elephant.cubic import cubic ->>> from elephant.spike_train_generation import homogeneous_poisson_process +>>> from elephant.spike_train_generation import StationaryPoissonProcess >>> np.random.seed(10) ->>> spiketrains = [homogeneous_poisson_process(rate=10*pq.Hz, -... t_stop=10 * pq.s) for _ in range(20)] +>>> spiketrains = [StationaryPoissonProcess(rate=10*pq.Hz, +... 
t_stop=10 * pq.s).generate_spiketrain() for _ in range(20)] >>> pop_count = statistics.time_histogram(spiketrains, bin_size=0.1 * pq.s) 2) apply CuBIC to the population count @@ -52,8 +52,6 @@ import scipy.special import scipy.stats -from elephant.utils import deprecated_alias - __all__ = [ "cubic" ] @@ -63,7 +61,6 @@ # Adaptation to python by Pietro Quaglio and Emiliano Torre -@deprecated_alias(data='histogram', ximax='max_iterations') def cubic(histogram, max_iterations=100, alpha=0.05): r""" Performs the CuBIC analysis :cite:`cubic-Staude2010_327` on a population diff --git a/elephant/current_source_density.py b/elephant/current_source_density.py index fd73d0733..094e66853 100644 --- a/elephant/current_source_density.py +++ b/elephant/current_source_density.py @@ -42,7 +42,6 @@ import elephant.current_source_density_src.utility_functions as utils from elephant.current_source_density_src import KCSD, icsd -from elephant.utils import deprecated_alias __all__ = [ "estimate_csd", @@ -61,7 +60,6 @@ py_iCSD_toolbox = ['StandardCSD'] + icsd_methods -@deprecated_alias(coords='coordinates') def estimate_csd(lfp, coordinates='coordinates', method=None, process_estimate=True, **kwargs): """ @@ -209,9 +207,6 @@ def estimate_csd(lfp, coordinates='coordinates', method=None, return output -@deprecated_alias(ele_xx='x_positions', ele_yy='y_positions', - ele_zz='z_positions', xlims='x_limits', ylims='y_limits', - zlims='z_limits', res='resolution') def generate_lfp(csd_profile, x_positions, y_positions=None, z_positions=None, x_limits=[0., 1.], y_limits=[0., 1.], z_limits=[0., 1.], resolution=50): diff --git a/elephant/gpfa/gpfa.py b/elephant/gpfa/gpfa.py index 726cf3ca1..4d671867e 100644 --- a/elephant/gpfa/gpfa.py +++ b/elephant/gpfa/gpfa.py @@ -75,10 +75,8 @@ import numpy as np import quantities as pq import sklearn -import warnings from elephant.gpfa import gpfa_core, gpfa_util -from elephant.utils import deprecated_alias __all__ = [ @@ -212,12 +210,10 @@ class 
GPFA(sklearn.base.BaseEstimator): ... n_channels = 20 ... firing_rates = np.random.randint(low=1, high=100, ... size=n_channels) * pq.Hz - ... spike_times = [] - ... for rate in firing_rates: - ... spike_times.append( - ... StationaryPoissonProcess(rate=rate).generate_spiketrain()) - ... data.append((trial, spike_times)) - + >>> spike_times = [StationaryPoissonProcess(rate + ... ).generate_spiketrain() for rate in firing_rates] + >>> data.append((trial, spike_times)) + ... >>> gpfa = GPFA(bin_size=20*pq.ms, x_dim=8) >>> gpfa.fit(data) # doctest: +SKIP >>> results = gpfa.transform(data, returned_data=['latent_variable_orth', @@ -232,7 +228,6 @@ class GPFA(sklearn.base.BaseEstimator): ... 'latent_variable']) """ - @deprecated_alias(binsize='bin_size') def __init__(self, bin_size=20 * pq.ms, x_dim=3, min_var_frac=0.01, tau_init=100.0 * pq.ms, eps_init=1.0E-3, em_tol=1.0E-8, em_max_iters=500, freq_ll=5, verbose=False): @@ -263,11 +258,6 @@ def __init__(self, bin_size=20 * pq.ms, x_dim=3, min_var_frac=0.01, self.fit_info = dict() self.transform_info = dict() - @property - def binsize(self): - warnings.warn("'binsize' is deprecated; use 'bin_size'") - return self.bin_size - def fit(self, spiketrains): """ Fit the model with the given training data. diff --git a/elephant/gpfa/gpfa_util.py b/elephant/gpfa/gpfa_util.py index 0491aec05..d9ba84f5d 100644 --- a/elephant/gpfa/gpfa_util.py +++ b/elephant/gpfa/gpfa_util.py @@ -15,10 +15,8 @@ import scipy as sp from elephant.conversion import BinnedSpikeTrain -from elephant.utils import deprecated_alias -@deprecated_alias(binsize='bin_size') def get_seqs(data, bin_size, use_sqrt=True): """ Converts the data into a rec array using internally BinnedSpikeTrain. 
diff --git a/elephant/kernels.py b/elephant/kernels.py index ad162f418..4cdfda4b7 100644 --- a/elephant/kernels.py +++ b/elephant/kernels.py @@ -81,8 +81,6 @@ import scipy.special import scipy.stats -from elephant.utils import deprecated_alias - __all__ = [ 'RectangularKernel', 'TriangularKernel', 'EpanechnikovLikeKernel', 'GaussianKernel', 'LaplacianKernel', 'ExponentialKernel', 'AlphaKernel' @@ -166,7 +164,6 @@ def __repr__(self): return "{cls}(sigma={sigma}, invert={invert})".format( cls=self.__class__.__name__, sigma=self.sigma, invert=self.invert) - @deprecated_alias(t='times') def __call__(self, times): """ Evaluates the kernel at all points in the array `times`. @@ -287,7 +284,6 @@ def _check_time_input(self, t): "Otherwise a normalization to 1 of the kernel " "cannot be performed.") - @deprecated_alias(t='time') def cdf(self, time): r""" Cumulative Distribution Function, CDF. @@ -323,7 +319,6 @@ def icdf(self, fraction): """ raise NotImplementedError - @deprecated_alias(t='times') def median_index(self, times): r""" Estimates the index of the Median of the kernel. 
@@ -469,7 +464,6 @@ def _evaluate(self, times): kernel = pq.Quantity(kernel, units=1 / t_units) return kernel - @deprecated_alias(t='time') def cdf(self, time): self._check_time_input(time) tau = math.sqrt(3) * self.sigma.rescale(time.units).magnitude @@ -536,7 +530,6 @@ def _evaluate(self, times): kernel = pq.Quantity(kernel, units=1 / times.units) return kernel - @deprecated_alias(t='time') def cdf(self, time): self._check_time_input(time) tau = math.sqrt(6) * self.sigma.rescale(time.units).magnitude @@ -609,7 +602,6 @@ def _evaluate(self, times): kernel = pq.Quantity(kernel, units=1 / times.units) return kernel - @deprecated_alias(t='time') def cdf(self, time): self._check_time_input(time) tau = math.sqrt(5) * self.sigma.rescale(time.units).magnitude @@ -732,7 +724,6 @@ def _evaluate(self, times): kernel = pq.Quantity(kernel, units=1 / times.units) return kernel - @deprecated_alias(t='time') def cdf(self, time): self._check_time_input(time) sigma = self.sigma.rescale(time.units).magnitude @@ -794,7 +785,6 @@ def _evaluate(self, times): kernel = pq.Quantity(kernel, units=1 / times.units) return kernel - @deprecated_alias(t='time') def cdf(self, time): self._check_time_input(time) tau = self.sigma.rescale(time.units).magnitude / math.sqrt(2) @@ -863,7 +853,6 @@ def _evaluate(self, times): kernel = pq.Quantity(kernel, units=1 / times.units) return kernel - @deprecated_alias(t='time') def cdf(self, time): self._check_time_input(time) tau = self.sigma.rescale(time.units).magnitude @@ -936,7 +925,6 @@ def _evaluate(self, times): kernel = pq.Quantity(kernel, units=1 / t_units) return kernel - @deprecated_alias(t='time') def cdf(self, time): self._check_time_input(time) tau = self.sigma.rescale(time.units).magnitude / math.sqrt(2) diff --git a/elephant/neo_tools.py b/elephant/neo_tools.py index ec881470d..172398c01 100644 --- a/elephant/neo_tools.py +++ b/elephant/neo_tools.py @@ -21,7 +21,6 @@ from neo.core.spiketrainlist import SpikeTrainList from neo.core.container 
import unique_objs -from elephant.utils import deprecated_alias __all__ = [ "extract_neo_attributes", @@ -31,7 +30,6 @@ ] -@deprecated_alias(obj='neo_object') def extract_neo_attributes(neo_object, parents=True, child_first=True, skip_array=False, skip_none=False): """ @@ -181,7 +179,8 @@ def get_all_spiketrains(container): Returns ------- list - A `neo.SpikeTrainList` object of the unique `neo.SpikeTrain` objects in `container`. + A `neo.SpikeTrainList` object of the unique `neo.SpikeTrain` objects + in `container`. """ return SpikeTrainList(_get_all_objs(container, 'SpikeTrain')) diff --git a/elephant/signal_processing.py b/elephant/signal_processing.py index 1e8fe8631..65c66e2a4 100644 --- a/elephant/signal_processing.py +++ b/elephant/signal_processing.py @@ -25,9 +25,7 @@ import quantities as pq import scipy.signal -from elephant.utils import deprecated_alias, check_same_units - -import warnings +from elephant.utils import check_same_units __all__ = [ "zscore", @@ -197,8 +195,6 @@ def zscore(signal, inplace=True): return signal_ztransformed -@deprecated_alias(ch_pairs='channel_pairs', nlags='n_lags', - env='hilbert_envelope') def cross_correlation_function(signal, channel_pairs, hilbert_envelope=False, n_lags=None, scaleopt='unbiased'): r""" @@ -391,9 +387,6 @@ def cross_correlation_function(signal, channel_pairs, hilbert_envelope=False, return cross_corr -@deprecated_alias(highpass_freq='highpass_frequency', - lowpass_freq='lowpass_frequency', - fs='sampling_frequency') def butter(signal, highpass_frequency=None, lowpass_frequency=None, order=4, filter_function='filtfilt', sampling_frequency=1.0, axis=-1): """ @@ -565,7 +558,6 @@ def butter(signal, highpass_frequency=None, lowpass_frequency=None, order=4, return filtered_data -@deprecated_alias(nco='n_cycles', freq='frequency', fs='sampling_frequency') def wavelet_transform(signal, frequency, n_cycles=6.0, sampling_frequency=1.0, zero_padding=True): r""" @@ -737,7 +729,6 @@ def _morlet_wavelet_ft(freq, 
n_cycles, fs, n): return signal_wt -@deprecated_alias(N='padding') def hilbert(signal, padding='nextpow'): """ Apply a Hilbert transform to a `neo.AnalogSignal` object in order to diff --git a/elephant/spade.py b/elephant/spade.py index 2fe8a7004..5284b1712 100644 --- a/elephant/spade.py +++ b/elephant/spade.py @@ -29,6 +29,7 @@ Notes ----- + This modules relies on the C++ implementation of the fp-growth algorithm developed by Forian Porrmann (available at https://github.com/fporrmann/FPG). The module replaces a more generic @@ -2262,7 +2263,6 @@ def _covered_spikes_criterion(occ_superset, return reject_superset, reject_subset -@deprecated_alias(binsize='bin_size') def concept_output_to_patterns(concepts, winlen, bin_size, pv_spec=None, spectrum='#', t_start=0 * pq.ms): """ diff --git a/elephant/spectral.py b/elephant/spectral.py index 24fac42f0..6c2e978bd 100644 --- a/elephant/spectral.py +++ b/elephant/spectral.py @@ -25,8 +25,6 @@ import quantities as pq import scipy.signal -from elephant.utils import deprecated_alias - __all__ = [ "welch_psd", "welch_coherence", @@ -37,8 +35,6 @@ ] -@deprecated_alias(num_seg='n_segments', len_seg='len_segment', - freq_res='frequency_resolution') def welch_psd(signal, n_segments=8, len_segment=None, frequency_resolution=None, overlap=0.5, fs=1.0, window='hann', nfft=None, detrend='constant', return_onesided=True, @@ -1124,8 +1120,6 @@ def multitaper_coherence(signal_i, signal_j, n_segments=1, len_segment=None, return freqs, coherence, phase_lag -@deprecated_alias(x='signal_i', y='signal_j', num_seg='n_segments', - len_seg='len_segment', freq_res='frequency_resolution') def welch_coherence(signal_i, signal_j, n_segments=8, len_segment=None, frequency_resolution=None, overlap=0.5, fs=1.0, window='hann', nfft=None, detrend='constant', diff --git a/elephant/spike_train_correlation.py b/elephant/spike_train_correlation.py index da79abdf2..9abf1e35a 100644 --- a/elephant/spike_train_correlation.py +++ 
b/elephant/spike_train_correlation.py @@ -23,9 +23,8 @@ import quantities as pq import scipy.signal from scipy import integrate +from elephant.utils import check_neo_consistency -from elephant.conversion import BinnedSpikeTrain -from elephant.utils import deprecated_alias, check_neo_consistency __all__ = [ "covariance", @@ -277,7 +276,6 @@ def kernel_smoothing(self, cross_corr_array, kernel): return np.convolve(cross_corr_array, kernel, mode='same') -@deprecated_alias(binned_sts='binned_spiketrain') def covariance(binned_spiketrain, binary=False, fast=True): r""" Calculate the NxN matrix of pairwise covariances between all combinations @@ -356,10 +354,10 @@ def covariance(binned_spiketrain, binary=False, fast=True): >>> from elephant.spike_train_correlation import covariance >>> np.random.seed(1) - >>> st1 = StationaryPoissonProcess( - ... rate=10*pq.Hz, t_stop=10.0*pq.s).generate_spiketrain() - >>> st2 = StationaryPoissonProcess( - ... rate=10*pq.Hz, t_stop=10.0*pq.s).generate_spiketrain() + >>> st1 = StationaryPoissonProcess(rate=10*pq.Hz, + ... t_stop=10.0*pq.s).generate_spiketrain() + >>> st2 = StationaryPoissonProcess(rate=10*pq.Hz, + ... t_stop=10.0*pq.s).generate_spiketrain() >>> cov_matrix = covariance(BinnedSpikeTrain([st1, st2], bin_size=5*pq.ms)) >>> cov_matrix # doctest: +SKIP array([[ 0.05432316, -0.00152276], @@ -378,7 +376,6 @@ def covariance(binned_spiketrain, binary=False, fast=True): binned_spiketrain, corrcoef_norm=False) -@deprecated_alias(binned_sts='binned_spiketrain') def correlation_coefficient(binned_spiketrain, binary=False, fast=True): r""" Calculate the NxN matrix of pairwise Pearson's correlation coefficients @@ -465,10 +462,10 @@ def correlation_coefficient(binned_spiketrain, binary=False, fast=True): >>> from elephant.spike_train_correlation import correlation_coefficient >>> np.random.seed(1) - >>> st1 = StationaryPoissonProcess( - ... rate=10*pq.Hz, t_stop=10.0*pq.s).generate_spiketrain() - >>> st2 = StationaryPoissonProcess( - ... 
rate=10*pq.Hz, t_stop=10.0*pq.s).generate_spiketrain() + >>> st1 = StationaryPoissonProcess(rate=10*pq.Hz, + ... t_stop=10.0*pq.s).generate_spiketrain() + >>> st2 = StationaryPoissonProcess(rate=10*pq.Hz, + ... t_stop=10.0*pq.s).generate_spiketrain() >>> corrcoef = correlation_coefficient(BinnedSpikeTrain([st1, st2], ... bin_size=5*pq.ms)) >>> corrcoef # doctest: +SKIP @@ -552,9 +549,6 @@ def _covariance_sparse(binned_spiketrain, corrcoef_norm): return res -@deprecated_alias(binned_st1='binned_spiketrain_i', - binned_st2='binned_spiketrain_j', - cross_corr_coef='cross_correlation_coefficient') def cross_correlation_histogram( binned_spiketrain_i, binned_spiketrain_j, window='full', border_correction=False, binary=False, kernel=None, method='speed', @@ -675,16 +669,17 @@ def cross_correlation_histogram( >>> import numpy as np >>> from elephant.conversion import BinnedSpikeTrain >>> from elephant.spike_train_generation import StationaryPoissonProcess - >>> from elephant.spike_train_correlation import cross_correlation_histogram # noqa - + >>> from elephant.spike_train_correlation import cross_correlation_histogram # noqa >>> np.random.seed(1) >>> binned_spiketrain_i = BinnedSpikeTrain( ... StationaryPoissonProcess( - ... 10. * pq.Hz, t_start=0 * pq.ms, t_stop=5000 * pq.ms).generate_spiketrain(), + ... 10. * pq.Hz, t_start=0 * pq.ms, + ... t_stop=5000 * pq.ms).generate_spiketrain(), ... bin_size=5. * pq.ms) >>> binned_spiketrain_j = BinnedSpikeTrain( ... StationaryPoissonProcess( - ... 10. * pq.Hz, t_start=0 * pq.ms, t_stop=5000 * pq.ms).generate_spiketrain(), + ... 10. * pq.Hz, t_start=0 * pq.ms, + ... t_stop=5000 * pq.ms).generate_spiketrain(), ... bin_size=5. 
* pq.ms) >>> cc_hist, lags = cross_correlation_histogram( @@ -823,7 +818,6 @@ def cross_correlation_histogram( cch = cross_correlation_histogram -@deprecated_alias(spiketrain_1='spiketrain_i', spiketrain_2='spiketrain_j') def spike_time_tiling_coefficient(spiketrain_i: neo.core.SpikeTrain, spiketrain_j: neo.core.SpikeTrain, dt: pq.Quantity = 0.005 * pq.s) -> float: @@ -998,7 +992,6 @@ def run_t(spiketrain: neo.core.SpikeTrain, dt: pq.Quantity = dt) -> float: sttc = spike_time_tiling_coefficient -@deprecated_alias(binned_st='binned_spiketrain', tau_max='max_tau') def spike_train_timescale(binned_spiketrain, max_tau): r""" Calculates the auto-correlation time of a binned spike train; uses the diff --git a/elephant/spike_train_dissimilarity.py b/elephant/spike_train_dissimilarity.py index 2922f5340..217322b2f 100644 --- a/elephant/spike_train_dissimilarity.py +++ b/elephant/spike_train_dissimilarity.py @@ -22,15 +22,12 @@ from __future__ import division, print_function, unicode_literals -import warnings - import numpy as np import quantities as pq import scipy as sp from neo.core import SpikeTrain import elephant.kernels as kernels -from elephant.utils import deprecated_alias __all__ = [ "victor_purpura_distance", @@ -51,7 +48,6 @@ def _create_matrix_from_indexed_function( return mat -@deprecated_alias(trains='spiketrains', q='cost_factor') def victor_purpura_distance(spiketrains, cost_factor=1.0 * pq.Hz, kernel=None, sort=True, algorithm='fast'): """ @@ -163,12 +159,6 @@ def compute(i, j): (len(spiketrains), len(spiketrains)), compute, kernel.is_symmetric()) -def victor_purpura_dist(*args, **kwargs): - warnings.warn("'victor_purpura_dist' funcion is deprecated; " - "use 'victor_purpura_distance'", DeprecationWarning) - return victor_purpura_distance(*args, **kwargs) - - def _victor_purpura_dist_for_st_pair_fast(spiketrain_a, spiketrain_b, kernel): """ The algorithm used is based on the one given in @@ -304,7 +294,6 @@ def 
_victor_purpura_dist_for_st_pair_intuitive(spiketrain_a, spiketrain_b, return scr[nspk_a, nspk_b] -@deprecated_alias(trains='spiketrains', tau='time_constant') def van_rossum_distance(spiketrains, time_constant=1.0 * pq.s, sort=True): """ Calculates the van Rossum distance :cite:`dissimilarity-Rossum2001_751`, @@ -378,12 +367,6 @@ def van_rossum_distance(spiketrains, time_constant=1.0 * pq.s, sort=True): return sp.sqrt(vr_dist) -def van_rossum_dist(*args, **kwargs): - warnings.warn("'van_rossum_dist' function is deprecated; " - "use 'van_rossum_distance'", DeprecationWarning) - return van_rossum_distance(*args, **kwargs) - - def _summed_dist_matrix(spiketrains, tau, presorted=False): # The algorithm underlying this implementation is described in # Houghton, C., & Kreuz, T. (2012). On the efficient calculation of van diff --git a/elephant/spike_train_generation.py b/elephant/spike_train_generation.py index 9fb24ffb5..2fd6c522a 100644 --- a/elephant/spike_train_generation.py +++ b/elephant/spike_train_generation.py @@ -63,7 +63,6 @@ from scipy.special import gammainc, gammaincc from elephant.spike_train_surrogates import dither_spike_train -from elephant.utils import deprecated_alias __all__ = [ "spike_extraction", @@ -84,7 +83,6 @@ ] -@deprecated_alias(extr_interval='interval') def spike_extraction(signal, threshold=0.0 * pq.mV, sign='above', time_stamps=None, interval=(-2 * pq.ms, 4 * pq.ms)): """ @@ -244,7 +242,6 @@ def threshold_detection(signal, threshold=0.0 * pq.mV, sign='above'): return result_st -@deprecated_alias(format='as_array') def peak_detection(signal, threshold=0.0 * pq.mV, sign='above', as_array=False): """ @@ -267,15 +264,6 @@ def peak_detection(signal, threshold=0.0 * pq.mV, sign='above', If True, a NumPy array of the resulting peak times is returned instead of a (default) `neo.SpikeTrain` object. Default: False - format : {None, 'raw'}, optional - - .. 
deprecated:: 0.8.0 - - Whether to return as SpikeTrain (None) or as a plain array of times - ('raw'). - Deprecated. Use `as_array=False` for None format and `as_array=True` - otherwise. - Default: None Returns ------- @@ -289,11 +277,6 @@ def peak_detection(signal, threshold=0.0 * pq.mV, sign='above', if sign not in ('above', 'below'): raise ValueError("sign should be 'above' or 'below'") - if as_array in (None, 'raw'): - warnings.warn("'format' is deprecated; use as_array=True", - DeprecationWarning) - as_array = bool(as_array) - if sign == 'above': cutout = np.where(signal > threshold)[0] peak_func = np.argmax @@ -612,22 +595,17 @@ class StationaryPoissonProcess(RenewalProcess): Examples -------- - >>> import quantities as pq - >>> spiketrain = StationaryPoissonProcess(rate=50.*pq.Hz, t_start=0*pq.ms, - ... t_stop=1000*pq.ms).generate_spiketrain() - >>> spiketrain_array = StationaryPoissonProcess( - ... rate=20*pq.Hz, t_start=5000*pq.ms, t_stop=10000*pq.ms - ... ).generate_spiketrain(as_array=True) - >>> spiketrain = StationaryPoissonProcess( - ... rate=50*pq.Hz, - ... t_start=0*pq.ms, t_stop=1000*pq.ms, - ... 
refractory_period = 3*pq.ms).generate_spiketrain() + >>> import quantities as pq # noqa + >>> from elephant.spike_train_generation import StationaryPoissonProcess + >>> spiketrain = StationaryPoissonProcess(rate=50.*pq.Hz,t_stop=1000*pq.ms,t_start=0*pq.ms).generate_spiketrain() + >>> spiketrain_array = StationaryPoissonProcess(rate=20*pq.Hz,t_stop=10000*pq.ms,t_start=5000*pq.ms).generate_spiketrain(as_array=True) + >>> spiketrain = StationaryPoissonProcess(rate=50*pq.Hz,t_stop=1000*pq.ms,t_start=0*pq.ms,refractory_period=3*pq.ms).generate_spiketrain() """ def __init__( self, rate: pq.Quantity, - t_stop: pq.Quantity = 1.*pq.s, - t_start: pq.Quantity = 0.*pq.s, + t_start: pq.Quantity = 0.0 * pq.ms, + t_stop: pq.Quantity = 1000.0*pq.ms, refractory_period: Optional[pq.Quantity] = None, equilibrium: bool = True ): @@ -1005,9 +983,8 @@ def __init__(self, rate_signal: neo.AnalogSignal, super().__init__(rate_signal=rate_signal) self.process_operational_time = StationaryPoissonProcess( - rate=self.mean_rate * 1./self.units, - t_start=self.t_start, - t_stop=self.t_stop) + rate=self.mean_rate * 1. / self.units, t_stop=self.t_stop, + t_start=self.t_start) self.refractory_period = refractory_period if self.refractory_period is not None: @@ -1107,20 +1084,23 @@ def homogeneous_poisson_process(rate, t_start=0.0 * pq.ms, Examples -------- >>> import quantities as pq - >>> spikes = homogeneous_poisson_process(50*pq.Hz, t_start=0*pq.ms, - ... t_stop=1000*pq.ms) - >>> spikes = homogeneous_poisson_process( - ... 20*pq.Hz, t_start=5000*pq.ms, t_stop=10000*pq.ms, as_array=True) - >>> spikes = homogeneous_poisson_process(50*pq.Hz, t_start=0*pq.ms, - ... t_stop=1000*pq.ms, refractory_period = 3*pq.ms) + >>> spikes = StationaryPoissonProcess(50*pq.Hz, t_start=0*pq.ms, + ... t_stop=1000*pq.ms).generate_spiketrain() + >>> spikes = StationaryPoissonProcess( + ... 20*pq.Hz, t_start=5000*pq.ms, + ... 
t_stop=10000*pq.ms).generate_spiketrain(as_array=True) + >>> spikes = StationaryPoissonProcess(50*pq.Hz, t_start=0*pq.ms, + ... t_stop=1000*pq.ms, + ... refractory_period = 3*pq.ms).generate_spiketrain() """ warnings.warn( "'homogeneous_poisson_process' is deprecated;" " use 'StationaryPoissonProcess'.", DeprecationWarning) - process = StationaryPoissonProcess( - rate=rate, t_start=t_start, t_stop=t_stop, - refractory_period=refractory_period, equilibrium=False) + process = StationaryPoissonProcess(rate=rate, t_stop=t_stop, + t_start=t_start, + refractory_period=refractory_period, + equilibrium=False) return process.generate_spiketrain(as_array=as_array) @@ -1212,10 +1192,10 @@ def homogeneous_gamma_process(a, b, t_start=0.0 * pq.ms, t_stop=1000.0 * pq.ms, Examples -------- >>> import quantities as pq - >>> spikes = homogeneous_gamma_process(2.0, 50*pq.Hz, 0*pq.ms, - ... 1000*pq.ms) - >>> spikes = homogeneous_gamma_process( - ... 5.0, 20*pq.Hz, 5000*pq.ms, 10000*pq.ms, as_array=True) + >>> spikes = StationaryPoissonProcess(50*pq.Hz, 0*pq.ms, 1000*pq.ms + ... ).generate_spiketrain() + >>> spikes = StationaryPoissonProcess(20*pq.Hz, 5000*pq.ms, + ... 10000*pq.ms).generate_spiketrain(as_array=True) """ warnings.warn( @@ -1269,7 +1249,6 @@ def inhomogeneous_gamma_process(rate, shape_factor, as_array=False): return process.generate_spiketrain(as_array=as_array) -@deprecated_alias(n='n_spiketrains') def _n_poisson(rate, t_stop, t_start=0.0 * pq.ms, n_spiketrains=1): """ Generates one or more independent Poisson spike trains. 
@@ -1315,17 +1294,16 @@ def _n_poisson(rate, t_stop, t_start=0.0 * pq.ms, n_spiketrains=1): # one rate for all spike trains if rate.ndim == 0: return StationaryPoissonProcess( - rate=rate, t_start=t_start, t_stop=t_stop - ).generate_n_spiketrains(n_spiketrains) + rate=rate, + t_stop=t_stop, + t_start=t_start).generate_n_spiketrains(n_spiketrains) # different rate for each spike train - return [StationaryPoissonProcess( - rate=single_rate, t_start=t_start, t_stop=t_stop).generate_spiketrain() + return [StationaryPoissonProcess(rate=single_rate, t_stop=t_stop, + t_start=t_start).generate_spiketrain() for single_rate in rate] -@deprecated_alias(rate_c='coincidence_rate', n='n_spiketrains', - return_coinc='return_coincidences') def single_interaction_process( rate, coincidence_rate, t_stop, n_spiketrains=2, jitter=0 * pq.ms, coincidences='deterministic', t_start=0 * pq.ms, min_delay=0 * pq.ms, @@ -1470,8 +1448,9 @@ def single_interaction_process( if len(coinc_times) < 2 or min(np.diff(coinc_times)) >= min_delay: break else: # coincidences == 'stochastic' - poisson_process = StationaryPoissonProcess( - rate=coincidence_rate, t_stop=t_stop, t_start=t_start) + poisson_process = StationaryPoissonProcess(rate=coincidence_rate, + t_stop=t_stop, + t_start=t_start) while True: coinc_times = poisson_process.generate_spiketrain() if len(coinc_times) < 2 or min(np.diff(coinc_times)) >= min_delay: @@ -1627,9 +1606,8 @@ def _mother_proc_cpp_stat( amplitude_distribution, np.arange(n_spiketrains + 1)) # expected rate of the mother process exp_mother_rate = (n_spiketrains * rate) / exp_amplitude - return StationaryPoissonProcess( - rate=exp_mother_rate, t_stop=t_stop, t_start=t_start - ).generate_spiketrain() + return StationaryPoissonProcess(rate=exp_mother_rate, t_stop=t_stop, + t_start=t_start).generate_spiketrain() def _cpp_hom_stat(amplitude_distribution, t_stop, rate, t_start=0 * pq.ms): @@ -1751,9 +1729,8 @@ def _cpp_het_stat(amplitude_distribution, t_stop, rates, 
t_start=0.*pq.ms): # Generate the independent heterogeneous Poisson processes poisson_spiketrains = \ - [StationaryPoissonProcess( - rate=rate - r_min, t_start=t_start, t_stop=t_stop - ).generate_spiketrain() + [StationaryPoissonProcess(rate=rate - r_min, t_stop=t_stop, + t_start=t_start).generate_spiketrain() for rate in rates] # Pool the correlated CPP and the corresponding Poisson processes @@ -1763,7 +1740,6 @@ def _cpp_het_stat(amplitude_distribution, t_stop, rates, t_start=0.*pq.ms): in zip(compound_poisson_spiketrains, poisson_spiketrains)] -@deprecated_alias(A='amplitude_distribution') def compound_poisson_process( rate, amplitude_distribution, t_stop, shift=None, t_start=0 * pq.ms): """ diff --git a/elephant/statistics.py b/elephant/statistics.py index f98922b32..868b4b06d 100644 --- a/elephant/statistics.py +++ b/elephant/statistics.py @@ -1573,7 +1573,7 @@ def _epoch_with_spread(self): Get an epoch object of the complexity distribution with `spread` > 0 """ bst = conv.BinnedSpikeTrain(self.input_spiketrains, - binsize=self.bin_size, + bin_size=self.bin_size, tolerance=self.tolerance) if self.binary: diff --git a/elephant/test/test_current_source_density.py b/elephant/test/test_current_source_density.py index b17ba3f89..b48b2b361 100644 --- a/elephant/test/test_current_source_density.py +++ b/elephant/test/test_current_source_density.py @@ -53,24 +53,27 @@ def setUp(self): self.lfp = csd.generate_lfp(utils.gauss_1d_dipole, self.ele_pos) self.csd_method = csd.estimate_csd - self.params = {} # Input dictionaries for each method - self.params['DeltaiCSD'] = {'sigma_top': 0. * pq.S / pq.m, - 'diam': 500E-6 * pq.m} - self.params['StepiCSD'] = {'sigma_top': 0. * pq.S / pq.m, 'tol': 1E-12, - 'diam': 500E-6 * pq.m} - self.params['SplineiCSD'] = {'sigma_top': 0. 
* pq.S / pq.m, - 'num_steps': 201, 'tol': 1E-12, - 'diam': 500E-6 * pq.m} - self.params['StandardCSD'] = {} - self.params['KCSD1D'] = {'h': 50., 'Rs': np.array((0.1, 0.25, 0.5))} + # Input dictionaries for each method + self.params = {'DeltaiCSD': {'sigma_top': 0. * pq.S / pq.m, + 'diam': 500E-6 * pq.m}, + 'StepiCSD': {'sigma_top': 0. * pq.S / pq.m, + 'tol': 1E-12, + 'diam': 500E-6 * pq.m}, + 'SplineiCSD': {'sigma_top': 0. * pq.S / pq.m, + 'num_steps': 201, 'tol': 1E-12, + 'diam': 500E-6 * pq.m}, + 'StandardCSD': {}, 'KCSD1D': {'h': 50., + 'Rs': np.array( + (0.1, 0.25, 0.5))}} def test_validate_inputs(self): self.assertRaises(TypeError, self.csd_method, lfp=[[1], [2], [3]]) self.assertRaises(ValueError, self.csd_method, lfp=self.lfp, - coords=self.ele_pos * pq.mm) + coordinates=self.ele_pos * pq.mm) # inconsistent number of electrodes self.assertRaises(ValueError, self.csd_method, lfp=self.lfp, - coords=[1, 2, 3, 4] * pq.mm, method='StandardCSD') + coordinates=[1, 2, 3, 4] * pq.mm, + method='StandardCSD') # bad method name self.assertRaises(ValueError, self.csd_method, lfp=self.lfp, method='InvalidMethodName') @@ -123,8 +126,8 @@ class CSD2D_TestCase(unittest.TestCase): def setUp(self): xx_ele, yy_ele = utils.generate_electrodes(dim=2) self.lfp = csd.generate_lfp(utils.large_source_2D, xx_ele, yy_ele) - self.params = {} # Input dictionaries for each method - self.params['KCSD2D'] = {'sigma': 1., 'Rs': np.array((0.1, 0.25, 0.5))} + self.params = {'KCSD2D': {'sigma': 1., 'Rs': np.array( + (0.1, 0.25, 0.5))}} # Input dictionaries for each method def test_kcsd2d_init(self): method = 'KCSD2D' @@ -140,10 +143,9 @@ def setUp(self): xx_ele, yy_ele, zz_ele = utils.generate_electrodes(dim=3) self.lfp = csd.generate_lfp(utils.gauss_3d_dipole, xx_ele, yy_ele, zz_ele) - self.params = {} - self.params['KCSD3D'] = {'gdx': 0.1, 'gdy': 0.1, 'gdz': 0.1, - 'src_type': 'step', - 'Rs': np.array((0.1, 0.25, 0.5))} + self.params = {'KCSD3D': {'gdx': 0.1, 'gdy': 0.1, 'gdz': 0.1, + 
'src_type': 'step', + 'Rs': np.array((0.1, 0.25, 0.5))}} def test_kcsd2d_init(self): method = 'KCSD3D' diff --git a/elephant/test/test_gpfa.py b/elephant/test/test_gpfa.py index 04aae6738..3a7c7a096 100644 --- a/elephant/test/test_gpfa.py +++ b/elephant/test/test_gpfa.py @@ -13,7 +13,7 @@ import quantities as pq from numpy.testing import assert_array_equal, assert_array_almost_equal -from elephant.spike_train_generation import homogeneous_poisson_process +from elephant.spike_train_generation import StationaryPoissonProcess try: import sklearn @@ -82,7 +82,8 @@ def gen_test_data(rates, durs, shapes=(1, 1, 1, 1)): n_channels = 20 for trial in range(n_trials): rates = np.random.randint(low=1, high=100, size=n_channels) - spike_times = [homogeneous_poisson_process(rate=rate * pq.Hz) + spike_times = [StationaryPoissonProcess(rate=rate * pq.Hz, + t_stop=1000.0 * pq.ms).generate_spiketrain() for rate in rates] self.data2.append(spike_times) diff --git a/elephant/test/test_parallel.py b/elephant/test/test_parallel.py index 8aedbd1de..f8952e304 100644 --- a/elephant/test/test_parallel.py +++ b/elephant/test/test_parallel.py @@ -5,7 +5,7 @@ from numpy.testing import assert_array_almost_equal from elephant.parallel import SingleProcess, ProcessPoolExecutor -from elephant.spike_train_generation import homogeneous_poisson_process +from elephant.spike_train_generation import StationaryPoissonProcess from elephant.statistics import mean_firing_rate @@ -23,8 +23,8 @@ def setUpClass(cls): np.random.seed(28) n_spiketrains = 10 cls.spiketrains = tuple( - homogeneous_poisson_process( - rate=10 * pq.Hz, t_stop=10 * pq.s, as_array=True) + StationaryPoissonProcess( + rate=10 * pq.Hz, t_stop=10 * pq.s).generate_spiketrain() for _ in range(n_spiketrains) ) cls.mean_fr = tuple(map(mean_firing_rate, cls.spiketrains)) diff --git a/elephant/test/test_signal_processing.py b/elephant/test/test_signal_processing.py index 83e51d4f6..a0668a3b0 100644 --- a/elephant/test/test_signal_processing.py 
+++ b/elephant/test/test_signal_processing.py @@ -489,8 +489,8 @@ def test_butter_filter_function(self): sampling_rate=1000 * pq.Hz, units='mV', array_annotations=dict(valid=True, my_list=[0])) - kwds = {'signal': noise, 'highpass_freq': 250.0 * pq.Hz, - 'lowpass_freq': None, 'filter_function': 'filtfilt'} + kwds = {'signal': noise, 'highpass_frequency': 250.0 * pq.Hz, + 'lowpass_frequency': None, 'filter_function': 'filtfilt'} filtered_noise = elephant.signal_processing.butter(**kwds) _, psd_filtfilt = spsig.welch( filtered_noise.T, nperseg=1024, fs=1000.0, detrend=lambda x: x) @@ -517,7 +517,7 @@ def test_butter_invalid_filter_function(self): anasig_dummy = neo.AnalogSignal( np.zeros(5000), sampling_rate=1000 * pq.Hz, units='mV') # test exception upon invalid filtfunc string - kwds = {'signal': anasig_dummy, 'highpass_freq': 250.0 * pq.Hz, + kwds = {'signal': anasig_dummy, 'highpass_frequency': 250.0 * pq.Hz, 'filter_function': 'invalid_filter'} self.assertRaises( ValueError, elephant.signal_processing.butter, **kwds) @@ -527,8 +527,8 @@ def test_butter_missing_cutoff_freqs(self): anasig_dummy = neo.AnalogSignal( np.zeros(5000), sampling_rate=1000 * pq.Hz, units='mV') # test a case where no cut-off frequencies are given - kwds = {'signal': anasig_dummy, 'highpass_freq': None, - 'lowpass_freq': None} + kwds = {'signal': anasig_dummy, 'highpass_frequency': None, + 'lowpass_frequency': None} self.assertRaises( ValueError, elephant.signal_processing.butter, **kwds) @@ -649,7 +649,7 @@ def test_hilbert_pad_type_error(self): self.assertRaises( ValueError, elephant.signal_processing.hilbert, - self.long_signals, N=padding) + self.long_signals, padding=padding) def test_hilbert_output_shape(self): """ @@ -807,27 +807,30 @@ def test_wavelet_errors(self): Tests if errors are raised as expected. 
""" # too high center frequency - kwds = {'signal': self.test_data, 'freq': self.fs / 2} + kwds = {'signal': self.test_data, 'frequency': self.fs / 2} self.assertRaises( ValueError, elephant.signal_processing.wavelet_transform, **kwds) kwds = { 'signal': self.test_data_arr, - 'freq': self.fs / 2, - 'fs': self.fs} + 'frequency': self.fs / 2, + 'sampling_frequency': self.fs} self.assertRaises( ValueError, elephant.signal_processing.wavelet_transform, **kwds) # too high center frequency in a list - kwds = {'signal': self.test_data, 'freq': [self.fs / 10, self.fs / 2]} + kwds = {'signal': self.test_data, + 'frequency': [self.fs / 10, self.fs / 2]} self.assertRaises( ValueError, elephant.signal_processing.wavelet_transform, **kwds) kwds = {'signal': self.test_data_arr, - 'freq': [self.fs / 10, self.fs / 2], 'fs': self.fs} + 'frequency': [self.fs / 10, self.fs / 2], + 'sampling_frequency': self.fs} self.assertRaises( ValueError, elephant.signal_processing.wavelet_transform, **kwds) # nco is not positive - kwds = {'signal': self.test_data, 'freq': self.fs / 10, 'nco': 0} + kwds = {'signal': self.test_data, 'frequency': self.fs / 10, + 'n_cycles': 0} self.assertRaises( ValueError, elephant.signal_processing.wavelet_transform, **kwds) diff --git a/elephant/test/test_spectral.py b/elephant/test/test_spectral.py index de7906638..e5a6d1085 100644 --- a/elephant/test/test_spectral.py +++ b/elephant/test/test_spectral.py @@ -32,19 +32,20 @@ def test_welch_psd_errors(self): # check for invalid parameter values # - length of segments self.assertRaises(ValueError, elephant.spectral.welch_psd, data, - len_seg=0) + len_segment=0) self.assertRaises(ValueError, elephant.spectral.welch_psd, data, - len_seg=data.shape[0] * 2) + len_segment=data.shape[0] * 2) # - number of segments self.assertRaises(ValueError, elephant.spectral.welch_psd, data, - num_seg=0) + n_segments=0) self.assertRaises(ValueError, elephant.spectral.welch_psd, data, - num_seg=data.shape[0] * 2) + 
n_segments=data.shape[0] * 2) # - frequency resolution self.assertRaises(ValueError, elephant.spectral.welch_psd, data, - freq_res=-1) + frequency_resolution=-1) self.assertRaises(ValueError, elephant.spectral.welch_psd, data, - freq_res=data.sampling_rate / (data.shape[0] + 1)) + frequency_resolution=data.sampling_rate / + (data.shape[0] + 1)) # - overlap self.assertRaises(ValueError, elephant.spectral.welch_psd, data, overlap=-1.0) @@ -1045,19 +1046,20 @@ def test_welch_cohere_errors(self): # check for invalid parameter values # - length of segments self.assertRaises(ValueError, elephant.spectral.welch_coherence, x, y, - len_seg=0) + len_segment=0) self.assertRaises(ValueError, elephant.spectral.welch_coherence, x, y, - len_seg=x.shape[0] * 2) + len_segment=x.shape[0] * 2) # - number of segments self.assertRaises(ValueError, elephant.spectral.welch_coherence, x, y, - num_seg=0) + n_segments=0) self.assertRaises(ValueError, elephant.spectral.welch_coherence, x, y, - num_seg=x.shape[0] * 2) + n_segments=x.shape[0] * 2) # - frequency resolution self.assertRaises(ValueError, elephant.spectral.welch_coherence, x, y, - freq_res=-1) + frequency_resolution=-1) self.assertRaises(ValueError, elephant.spectral.welch_coherence, x, y, - freq_res=x.sampling_rate / (x.shape[0] + 1)) + frequency_resolution=x.sampling_rate / + (x.shape[0] + 1)) # - overlap self.assertRaises(ValueError, elephant.spectral.welch_coherence, x, y, overlap=-1.0) diff --git a/elephant/test/test_spike_train_correlation.py b/elephant/test/test_spike_train_correlation.py index ff4d088ca..1e7bb5948 100644 --- a/elephant/test/test_spike_train_correlation.py +++ b/elephant/test/test_spike_train_correlation.py @@ -17,6 +17,9 @@ import elephant.conversion as conv import elephant.spike_train_correlation as sc +from elephant.spike_train_generation import StationaryPoissonProcess, \ + StationaryGammaProcess +import math from elephant.datasets import download_datasets, ELEPHANT_TMP_DIR from 
elephant.spike_train_generation import homogeneous_poisson_process, \ homogeneous_gamma_process @@ -128,7 +131,8 @@ def test_covariance_binned_short_input(self): def test_covariance_fast_mode(self): np.random.seed(27) - st = homogeneous_poisson_process(rate=10 * pq.Hz, t_stop=10 * pq.s) + st = StationaryPoissonProcess(rate=10 * pq.Hz, t_stop=10 * pq.s + ).generate_spiketrain() binned_st = conv.BinnedSpikeTrain(st, n_bins=10) assert_array_almost_equal(sc.covariance(binned_st, fast=False), sc.covariance(binned_st, fast=True)) @@ -266,7 +270,8 @@ def test_empty_spike_train(self): def test_corrcoef_fast_mode(self): np.random.seed(27) - st = homogeneous_poisson_process(rate=10 * pq.Hz, t_stop=10 * pq.s) + st = StationaryPoissonProcess(rate=10 * pq.Hz, t_stop=10 * pq.s + ).generate_spiketrain() binned_st = conv.BinnedSpikeTrain(st, n_bins=10) assert_array_almost_equal( sc.correlation_coefficient( @@ -890,7 +895,9 @@ def test_timescale_calculation(self): np.random.seed(35) for _ in range(10): - spikes = homogeneous_gamma_process(2, 2 * nu, 0 * pq.ms, T) + spikes = StationaryGammaProcess(rate=2 * nu / 2, shape_factor=2, + t_start=0 * pq.ms, + t_stop=T).generate_spiketrain() spikes_bin = conv.BinnedSpikeTrain(spikes, bin_size) timescale_i = sc.spike_train_timescale(spikes_bin, 10 * timescale) assert_array_almost_equal(timescale, timescale_i, decimal=3) diff --git a/elephant/test/test_spike_train_dissimilarity.py b/elephant/test/test_spike_train_dissimilarity.py index 4c980ab0a..75861f79f 100644 --- a/elephant/test/test_spike_train_dissimilarity.py +++ b/elephant/test/test_spike_train_dissimilarity.py @@ -12,13 +12,13 @@ import scipy.integrate as spint from quantities import ms, s, Hz import elephant.kernels as kernels -import elephant.spike_train_generation as stg +from elephant.spike_train_generation import StationaryPoissonProcess import elephant.spike_train_dissimilarity as stds from elephant.datasets import download_datasets, ELEPHANT_TMP_DIR -class 
TimeScaleDependSpikeTrainDissimMeasures_TestCase(unittest.TestCase): +class TimeScaleDependSpikeTrainDissimMeasuresTestCase(unittest.TestCase): def setUp(self): self.st00 = SpikeTrain([], units='ms', t_stop=1000.0) self.st01 = SpikeTrain([1], units='ms', t_stop=1000.0) @@ -38,9 +38,15 @@ def setUp(self): self.st15 = SpikeTrain([0.01, 0.02, 0.03, 0.04, 0.05], units='s', t_stop=1000.0) self.st16 = SpikeTrain([12, 16, 28, 30, 42], units='ms', t_stop=1000.0) - self.st21 = stg.homogeneous_poisson_process(50 * Hz, 0 * ms, 1000 * ms) - self.st22 = stg.homogeneous_poisson_process(40 * Hz, 0 * ms, 1000 * ms) - self.st23 = stg.homogeneous_poisson_process(30 * Hz, 0 * ms, 1000 * ms) + self.st21 = StationaryPoissonProcess(rate=50 * Hz, t_start=0 * ms, + t_stop=1000 * ms + ).generate_spiketrain() + self.st22 = StationaryPoissonProcess(rate=40 * Hz, t_start=0 * ms, + t_stop=1000 * ms + ).generate_spiketrain() + self.st23 = StationaryPoissonProcess(rate=30 * Hz, t_start=0 * ms, + t_stop=1000 * ms + ).generate_spiketrain() self.rd_st_list = [self.st21, self.st22, self.st23] self.st31 = SpikeTrain([12.0], units='ms', t_stop=1000.0) self.st32 = SpikeTrain([12.0, 12.0], units='ms', t_stop=1000.0) diff --git a/elephant/test/test_spike_train_generation.py b/elephant/test/test_spike_train_generation.py index 2c503014e..f21048e2f 100644 --- a/elephant/test/test_spike_train_generation.py +++ b/elephant/test/test_spike_train_generation.py @@ -18,7 +18,13 @@ import quantities as pq from scipy.stats import expon, kstest, poisson, variation -import elephant.spike_train_generation as stg +from elephant.spike_train_generation import StationaryPoissonProcess, \ + threshold_detection, peak_detection, spike_extraction, \ + AbstractPointProcess, StationaryGammaProcess, StationaryLogNormalProcess, \ + NonStationaryPoissonProcess, NonStationaryGammaProcess, \ + StationaryInverseGaussianProcess, _n_poisson, single_interaction_process, \ + cpp, homogeneous_gamma_process, homogeneous_poisson_process, \ 
+ inhomogeneous_poisson_process, inhomogeneous_gamma_process from elephant.statistics import isi, instantaneous_rate from elephant import kernels @@ -52,7 +58,7 @@ def test_threshold_detection(self): # Test whether spikes are extracted at the correct times from # an analog signal. - spike_train = stg.threshold_detection(self.vm) + spike_train = threshold_detection(self.vm) try: len(spike_train) # Handles an error in Neo related to some zero length @@ -77,7 +83,7 @@ def test_threshold_detection(self): def test_peak_detection_threshold(self): # Test for empty SpikeTrain when threshold is too high - result = stg.threshold_detection(self.vm, threshold=30 * pq.mV) + result = threshold_detection(self.vm, threshold=30 * pq.mV) self.assertEqual(len(result), 0) @@ -98,7 +104,7 @@ def setUp(self): def test_peak_detection_time_stamps(self): # Test with default arguments - result = stg.peak_detection(self.vm) + result = peak_detection(self.vm) self.assertEqual(len(self.true_time_stamps), len(result)) self.assertIsInstance(result, neo.core.SpikeTrain) @@ -109,7 +115,7 @@ def test_peak_detection_time_stamps(self): def test_peak_detection_threshold(self): # Test for empty SpikeTrain when threshold is too high - result = stg.peak_detection(self.vm, threshold=30 * pq.mV) + result = peak_detection(self.vm, threshold=30 * pq.mV) self.assertEqual(len(result), 0) @@ -137,8 +143,8 @@ def setUp(self): -0.06715259, -0.06703235, -0.06691635]) def test_spike_extraction_waveform(self): - spike_train = stg.spike_extraction(self.vm.reshape(-1), - interval=(-1 * pq.ms, 2 * pq.ms)) + spike_train = spike_extraction(self.vm.reshape(-1), + interval=(-1 * pq.ms, 2 * pq.ms)) assert_array_almost_equal( spike_train.waveforms[0][0].magnitude.reshape(-1), @@ -147,7 +153,7 @@ def test_spike_extraction_waveform(self): class AbstractPointProcessTestCase(unittest.TestCase): def test_not_implemented_error(self): - process = stg.AbstractPointProcess() + process = AbstractPointProcess() self.assertRaises( 
NotImplementedError, process._generate_spiketrain_as_array) @@ -162,12 +168,12 @@ def test_statistics(self): for t_stop in [2345 * pq.ms, 2.345 * pq.s]: for refractory_period in (None, 3. * pq.ms): np.random.seed(seed=123456) - spiketrain_old = stg.homogeneous_poisson_process( + spiketrain_old = homogeneous_poisson_process( rate, t_stop=t_stop, refractory_period=refractory_period) np.random.seed(seed=123456) - spiketrain = stg.StationaryPoissonProcess( + spiketrain = StationaryPoissonProcess( rate, t_stop=t_stop, refractory_period=refractory_period, equilibrium=False @@ -224,11 +230,11 @@ def test_zero_refractory_period(self): t_stop = 20 * pq.s np.random.seed(27) - sp1 = stg.StationaryPoissonProcess( + sp1 = StationaryPoissonProcess( rate, t_stop=t_stop).generate_spiketrain(as_array=True) np.random.seed(27) - sp2 = stg.StationaryPoissonProcess( + sp2 = StationaryPoissonProcess( rate, t_stop=t_stop, refractory_period=0. * pq.ms ).generate_spiketrain( as_array=True) @@ -240,10 +246,10 @@ def test_t_start_and_t_stop(self): t_start = 17 * pq.ms t_stop = 2 * pq.s - sp1 = stg.StationaryPoissonProcess( + sp1 = StationaryPoissonProcess( rate, t_start=t_start, t_stop=t_stop).generate_spiketrain() - sp2 = stg.StationaryPoissonProcess( + sp2 = StationaryPoissonProcess( rate, t_start=t_start, t_stop=t_stop, refractory_period=3 * pq.ms ).generate_spiketrain() @@ -257,7 +263,7 @@ def test_zero_rate(self): warnings.simplefilter("ignore") # RuntimeWarning: divide by zero encountered in true_divide # mean_interval = 1 / rate.magnitude, when rate == 0 Hz. 
- spiketrain = stg.StationaryPoissonProcess( + spiketrain = StationaryPoissonProcess( rate=0 * pq.Hz, t_stop=10 * pq.s, refractory_period=refractory_period).generate_spiketrain() self.assertEqual(spiketrain.size, 0) @@ -266,7 +272,7 @@ def test_nondecrease_spike_times(self): for refractory_period in (None, 3 * pq.ms): np.random.seed(27) - spiketrain = stg.StationaryPoissonProcess( + spiketrain = StationaryPoissonProcess( rate=10 * pq.Hz, t_stop=1000 * pq.s, refractory_period=refractory_period).generate_spiketrain() diffs = np.diff(spiketrain.times) @@ -276,7 +282,7 @@ def test_compare_with_as_array(self): rate = 10 * pq.Hz t_stop = 10 * pq.s for refractory_period in (None, 3 * pq.ms): - process = stg.StationaryPoissonProcess( + process = StationaryPoissonProcess( rate=rate, t_stop=t_stop, refractory_period=refractory_period) np.random.seed(27) spiketrain = process.generate_spiketrain() @@ -292,7 +298,7 @@ def test_effective_rate_refractory_period(self): np.random.seed(27) rate_expected = 10 * pq.Hz refractory_period = 90 * pq.ms # 10 ms of effective ISI - spiketrain = stg.StationaryPoissonProcess( + spiketrain = StationaryPoissonProcess( rate_expected, t_stop=1000 * pq.s, refractory_period=refractory_period ).generate_spiketrain() @@ -310,7 +316,7 @@ def test_invalid(self): for refractory_period in (None, 3 * pq.ms): # t_stop < t_start - hpp = stg.StationaryPoissonProcess + hpp = StationaryPoissonProcess self.assertRaises( ValueError, hpp, rate=rate, t_start=5 * pq.ms, t_stop=1 * pq.ms, refractory_period=refractory_period) @@ -321,7 +327,7 @@ def test_invalid(self): refractory_period=refractory_period) # no units provided for refractory_period self.assertRaises(ValueError, hpp, rate=rate, refractory_period=2) - self.assertRaises(ValueError, stg.StationaryPoissonProcess, + self.assertRaises(ValueError, StationaryPoissonProcess, rate, refractory_period=1. 
* pq.s) @@ -335,10 +341,10 @@ def test_statistics(self): for b in (67.0 * pq.Hz, 0.067 * pq.kHz): for t_stop in (2345 * pq.ms, 2.345 * pq.s): np.random.seed(seed=12345) - spiketrain_old = stg.homogeneous_gamma_process( + spiketrain_old = homogeneous_gamma_process( a, b, t_stop=t_stop) np.random.seed(seed=12345) - spiketrain = stg.StationaryGammaProcess( + spiketrain = StationaryGammaProcess( rate=b / a, shape_factor=a, t_stop=t_stop, equilibrium=False ).generate_spiketrain() @@ -377,12 +383,12 @@ def test_compare_with_as_array(self): a = 3. b = 10 * pq.Hz np.random.seed(27) - spiketrain = stg.StationaryGammaProcess( + spiketrain = StationaryGammaProcess( rate=b / a, shape_factor=a, equilibrium=False).generate_spiketrain() self.assertIsInstance(spiketrain, neo.SpikeTrain) np.random.seed(27) - spiketrain_array = stg.StationaryGammaProcess( + spiketrain_array = StationaryGammaProcess( rate=b / a, shape_factor=a, equilibrium=False).generate_spiketrain( as_array=True) # don't check with isinstance: pq.Quantity is a subclass of np.ndarray @@ -400,7 +406,7 @@ def test_statistics(self): for rate in (67.0 * pq.Hz, 0.067 * pq.kHz): for t_stop in (2345 * pq.ms, 2.345 * pq.s): np.random.seed(seed=123456) - spiketrain = stg.StationaryLogNormalProcess( + spiketrain = StationaryLogNormalProcess( rate=rate, sigma=sigma, t_stop=t_stop, equilibrium=False ).generate_spiketrain() @@ -440,12 +446,12 @@ def test_compare_with_as_array(self): sigma = 1.2 rate = 10 * pq.Hz np.random.seed(27) - spiketrain = stg.StationaryLogNormalProcess( + spiketrain = StationaryLogNormalProcess( rate=rate, sigma=sigma, equilibrium=False).generate_spiketrain() self.assertIsInstance(spiketrain, neo.SpikeTrain) np.random.seed(27) - spiketrain_array = stg.StationaryLogNormalProcess( + spiketrain_array = StationaryLogNormalProcess( rate=rate, sigma=sigma, equilibrium=False ).generate_spiketrain( as_array=True) @@ -464,7 +470,7 @@ def test_statistics(self): for rate in (67.0 * pq.Hz, 0.067 * pq.kHz): for t_stop 
in (2345 * pq.ms, 2.345 * pq.s): np.random.seed(seed=123456) - spiketrain = stg.StationaryInverseGaussianProcess( + spiketrain = StationaryInverseGaussianProcess( rate=rate, cv=cv, t_stop=t_stop, equilibrium=False ).generate_spiketrain() @@ -504,11 +510,11 @@ def test_compare_with_as_array(self): cv = 1.2 rate = 10 * pq.Hz np.random.seed(27) - spiketrain = stg.StationaryInverseGaussianProcess( + spiketrain = StationaryInverseGaussianProcess( rate=rate, cv=cv, equilibrium=False).generate_spiketrain() self.assertIsInstance(spiketrain, neo.SpikeTrain) np.random.seed(27) - spiketrain_array = stg.StationaryInverseGaussianProcess( + spiketrain_array = StationaryInverseGaussianProcess( rate=rate, cv=cv, equilibrium=False).generate_spiketrain( as_array=True) # don't check with isinstance: pq.Quantity is a subclass of np.ndarray @@ -524,60 +530,60 @@ def setUp(self): self.n_spiketrains = 10 # can only have CV equal to 1. - self.poisson_process = stg.StationaryPoissonProcess( + self.poisson_process = StationaryPoissonProcess( rate=self.rate, t_stop=self.t_stop) # choose all further processes to have CV of 1/2 # CV = 1 - rate * refractory_period - self.poisson_refractory_period_ordinary = stg.StationaryPoissonProcess( + self.poisson_refractory_period_ordinary = StationaryPoissonProcess( rate=self.rate, refractory_period=0.5 / self.rate, t_stop=self.t_stop, equilibrium=False) self.poisson_refractory_period_equilibrium = \ - stg.StationaryPoissonProcess( + StationaryPoissonProcess( rate=self.rate, refractory_period=0.5 / self.rate, t_stop=self.t_stop, equilibrium=True) # CV = 1 / sqrt(shape_factor) - self.gamma_process_ordinary = stg.StationaryGammaProcess( + self.gamma_process_ordinary = StationaryGammaProcess( rate=self.rate, shape_factor=4, t_stop=self.t_stop, equilibrium=False) - self.gamma_process_equilibrium = stg.StationaryGammaProcess( + self.gamma_process_equilibrium = StationaryGammaProcess( rate=self.rate, shape_factor=4, t_stop=self.t_stop, equilibrium=True) # CV = 
sqrt(exp(sigma**2) - 1) - self.log_normal_process_ordinary = stg.StationaryLogNormalProcess( + self.log_normal_process_ordinary = StationaryLogNormalProcess( rate=self.rate, sigma=np.sqrt(np.log(5. / 4.)), t_stop=self.t_stop, equilibrium=False) - self.log_normal_process_equilibrium = stg.StationaryLogNormalProcess( + self.log_normal_process_equilibrium = StationaryLogNormalProcess( rate=self.rate, sigma=np.sqrt(np.log(5. / 4.)), t_stop=self.t_stop, equilibrium=True) self.inverse_gaussian_process_ordinary = \ - stg.StationaryInverseGaussianProcess( + StationaryInverseGaussianProcess( rate=self.rate, cv=1 / 2, t_stop=self.t_stop, equilibrium=False) self.inverse_gaussian_process_equilibrium = \ - stg.StationaryInverseGaussianProcess( + StationaryInverseGaussianProcess( rate=self.rate, cv=1 / 2, t_stop=self.t_stop, @@ -662,11 +668,11 @@ def test_statistics(self): for rate in (self.rate_profile, self.rate_profile.rescale(pq.kHz)): for refractory_period in (3 * pq.ms, None): np.random.seed(seed=12345) - spiketrain_old = stg.inhomogeneous_poisson_process( + spiketrain_old = inhomogeneous_poisson_process( rate, refractory_period=refractory_period) np.random.seed(seed=12345) - process = stg.NonStationaryPoissonProcess + process = NonStationaryPoissonProcess spiketrain = process(rate, refractory_period=refractory_period ).generate_spiketrain() @@ -698,16 +704,16 @@ def test_statistics(self): self.assertEqual(rate.t_start, spiketrain.t_start) # Testing type - spiketrain_as_array = stg.NonStationaryPoissonProcess( + spiketrain_as_array = NonStationaryPoissonProcess( rate).generate_spiketrain(as_array=True) self.assertTrue(isinstance(spiketrain_as_array, np.ndarray)) self.assertTrue(isinstance(spiketrain, neo.SpikeTrain)) # Testing type for refractory period refractory_period = 3 * pq.ms - spiketrain = stg.NonStationaryPoissonProcess( + spiketrain = NonStationaryPoissonProcess( rate, refractory_period=refractory_period).generate_spiketrain() - spiketrain_as_array = 
stg.NonStationaryPoissonProcess( + spiketrain_as_array = NonStationaryPoissonProcess( rate, refractory_period=refractory_period).generate_spiketrain( as_array=True) self.assertTrue(isinstance(spiketrain_as_array, np.ndarray)) @@ -715,7 +721,7 @@ def test_statistics(self): # Check that to high refractory period raises error self.assertRaises( - ValueError, stg.NonStationaryPoissonProcess, + ValueError, NonStationaryPoissonProcess, self.rate_profile, refractory_period=1000 * pq.ms) @@ -725,7 +731,7 @@ def test_effective_rate_refractory_period(self): refractory_period = 90 * pq.ms # 10 ms of effective ISI rates = neo.AnalogSignal(np.repeat(rate_expected, 1000), units=pq.Hz, t_start=0 * pq.ms, sampling_rate=1 * pq.Hz) - spiketrain = stg.NonStationaryPoissonProcess( + spiketrain = NonStationaryPoissonProcess( rates, refractory_period=refractory_period).generate_spiketrain() rate_obtained = len(spiketrain) / spiketrain.t_stop self.assertAlmostEqual( @@ -739,18 +745,18 @@ def test_effective_rate_refractory_period(self): def test_zero_rate(self): for refractory_period in (3 * pq.ms, None): - process = stg.NonStationaryPoissonProcess + process = NonStationaryPoissonProcess spiketrain = process( self.rate_profile_0, refractory_period=refractory_period ).generate_spiketrain() self.assertEqual(spiketrain.size, 0) self.assertRaises( - ValueError, stg.NonStationaryPoissonProcess, + ValueError, NonStationaryPoissonProcess, self.rate_profile, refractory_period=5) def test_negative_rates(self): for refractory_period in (3 * pq.ms, None): - process = stg.NonStationaryPoissonProcess + process = NonStationaryPoissonProcess self.assertRaises( ValueError, process, self.rate_profile_negative, @@ -777,10 +783,10 @@ def test_statistics(self): for rate in [self.rate_profile, self.rate_profile.rescale(pq.kHz)]: np.random.seed(seed=12345) - spiketrain_old = stg.inhomogeneous_gamma_process( + spiketrain_old = inhomogeneous_gamma_process( rate, shape_factor=shape_factor) 
np.random.seed(seed=12345) - spiketrain = stg.NonStationaryGammaProcess( + spiketrain = NonStationaryGammaProcess( rate, shape_factor=shape_factor).generate_spiketrain() assert_allclose(spiketrain_old.magnitude, spiketrain.magnitude) @@ -805,7 +811,7 @@ def test_statistics(self): self.assertEqual(rate.t_start, spiketrain.t_start) # Testing type - spiketrain_as_array = stg.NonStationaryGammaProcess( + spiketrain_as_array = NonStationaryGammaProcess( rate, shape_factor=shape_factor).generate_spiketrain( as_array=True) self.assertTrue(isinstance(spiketrain_as_array, np.ndarray)) @@ -813,20 +819,20 @@ def test_statistics(self): # check error if rate has wrong format self.assertRaises( - ValueError, stg.NonStationaryGammaProcess, + ValueError, NonStationaryGammaProcess, rate_signal=[0.1, 2.], shape_factor=shape_factor) # check error if negative values in rate self.assertRaises( - ValueError, stg.NonStationaryGammaProcess, + ValueError, NonStationaryGammaProcess, rate_signal=neo.AnalogSignal( [-0.1, 10.] 
* pq.Hz, sampling_period=0.001 * pq.s), shape_factor=shape_factor) # check error if rate is empty self.assertRaises( - ValueError, stg.NonStationaryGammaProcess, + ValueError, NonStationaryGammaProcess, rate_signal=neo.AnalogSignal( [] * pq.Hz, sampling_period=0.001 * pq.s), shape_factor=shape_factor) @@ -852,7 +858,7 @@ def test_recovered_firing_rate_profile(self): # factor: the method supports float values that is not trivial # for inhomogeneous gamma process generation for shape_factor in (2.5, 10.): - spiketrains = stg.NonStationaryGammaProcess( + spiketrains = NonStationaryGammaProcess( rate_profile, shape_factor=shape_factor ).generate_n_spiketrains(n_trials) rate_recovered = instantaneous_rate( @@ -880,7 +886,7 @@ def setUp(self): def test_poisson(self): # Check the output types for input rate + n number of neurons - pp = stg._n_poisson( + pp = _n_poisson( rate=self.rate, t_stop=self.t_stop, n_spiketrains=self.n) @@ -890,7 +896,7 @@ def test_poisson(self): self.assertEqual(len(pp), self.n) # Check the output types for input list of rates - pp = stg._n_poisson(rate=self.rates, t_stop=self.t_stop) + pp = _n_poisson(rate=self.rates, t_stop=self.t_stop) self.assertIsInstance(pp, list) self.assertIsInstance(pp[0], neo.core.spiketrain.SpikeTrain) self.assertEqual(pp[0].simplified.units, 1000 * pq.ms) @@ -899,21 +905,21 @@ def test_poisson(self): def test_poisson_error(self): # Dimensionless rate self.assertRaises( - ValueError, stg._n_poisson, rate=5, t_stop=self.t_stop) + ValueError, _n_poisson, rate=5, t_stop=self.t_stop) # Negative rate self.assertRaises( - ValueError, stg._n_poisson, rate=-5 * pq.Hz, t_stop=self.t_stop) + ValueError, _n_poisson, rate=-5 * pq.Hz, t_stop=self.t_stop) # Negative value when rate is a list self.assertRaises( - ValueError, stg._n_poisson, rate=[-5, 3] * pq.Hz, + ValueError, _n_poisson, rate=[-5, 3] * pq.Hz, t_stop=self.t_stop) # Negative n self.assertRaises( - ValueError, stg._n_poisson, rate=self.rate, t_stop=self.t_stop, + 
ValueError, _n_poisson, rate=self.rate, t_stop=self.t_stop, n_spiketrains=-1) # t_start>t_stop self.assertRaises( - ValueError, stg._n_poisson, rate=self.rate, t_start=4 * pq.ms, + ValueError, _n_poisson, rate=self.rate, t_start=4 * pq.ms, t_stop=3 * pq.ms, n_spiketrains=3) @@ -937,7 +943,7 @@ def format_check(self, sip, coinc): def test_sip(self): # Generate an example SIP mode - sip, coinc = stg.single_interaction_process( + sip, coinc = single_interaction_process( n_spiketrains=self.n, t_stop=self.t_stop, rate=self.rate, coincidence_rate=self.rate_c, return_coincidences=True) @@ -949,7 +955,7 @@ def test_sip(self): with warnings.catch_warnings(): warnings.simplefilter("ignore") # Generate an example SIP mode giving a list of rates as imput - sip, coinc = stg.single_interaction_process( + sip, coinc = single_interaction_process( t_stop=self.t_stop, rate=self.rates, coincidence_rate=self.rate_c, return_coincidences=True) @@ -960,7 +966,7 @@ def test_sip(self): (self.rate_c * self.t_stop).rescale(pq.dimensionless)) # Generate an example SIP mode stochastic number of coincidences - sip = stg.single_interaction_process( + sip = single_interaction_process( n_spiketrains=self.n, t_stop=self.t_stop, rate=self.rate, @@ -976,26 +982,26 @@ def test_sip(self): def test_sip_error(self): # Negative rate self.assertRaises( - ValueError, stg.single_interaction_process, n_spiketrains=self.n, + ValueError, single_interaction_process, n_spiketrains=self.n, rate=-5 * pq.Hz, coincidence_rate=self.rate_c, t_stop=self.t_stop) # Negative coincidence rate self.assertRaises( - ValueError, stg.single_interaction_process, n_spiketrains=self.n, + ValueError, single_interaction_process, n_spiketrains=self.n, rate=self.rate, coincidence_rate=-3 * pq.Hz, t_stop=self.t_stop) # Negative value when rate is a list self.assertRaises( - ValueError, stg.single_interaction_process, n_spiketrains=self.n, + ValueError, single_interaction_process, n_spiketrains=self.n, rate=[-5, 3, 4, 2] * pq.Hz, 
coincidence_rate=self.rate_c, t_stop=self.t_stop) # Negative n self.assertRaises( - ValueError, stg.single_interaction_process, n_spiketrains=-1, + ValueError, single_interaction_process, n_spiketrains=-1, rate=self.rate, coincidence_rate=self.rate_c, t_stop=self.t_stop) # Rate_c < rate self.assertRaises( ValueError, - stg.single_interaction_process, + single_interaction_process, n_spiketrains=self.n, rate=self.rate, coincidence_rate=self.rate + 1 * pq.Hz, @@ -1026,8 +1032,8 @@ def test_cpp_hom(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = 3 * pq.Hz - cpp_hom = stg.cpp(rate, amplitude_distribution, - t_stop, t_start=t_start) + cpp_hom = cpp(rate, amplitude_distribution, + t_stop, t_start=t_start) # testing the output formats self.format_check(cpp_hom, amplitude_distribution, t_start, t_stop) @@ -1035,8 +1041,8 @@ def test_cpp_hom(self): t_stop = 10000 * pq.ms t_start = 5 * 1000 * pq.ms rate = 3 * pq.Hz - cpp_unit = stg.cpp(rate, amplitude_distribution, - t_stop, t_start=t_start) + cpp_unit = cpp(rate, amplitude_distribution, + t_stop, t_start=t_start) self.assertEqual(cpp_unit[0].units, t_stop.units) self.assertEqual(cpp_unit[0].t_stop.units, t_stop.units) @@ -1047,7 +1053,7 @@ def test_cpp_hom(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = 3 * pq.Hz - cpp_hom_empty = stg.cpp( + cpp_hom_empty = cpp( rate, amplitude_distribution, t_stop, t_start=t_start) self.assertEqual( @@ -1058,7 +1064,7 @@ def test_cpp_hom(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = 0 * pq.Hz - cpp_hom_empty_r = stg.cpp( + cpp_hom_empty_r = cpp( rate, amplitude_distribution, t_stop, t_start=t_start) self.assertEqual( [len(train) for train in cpp_hom_empty_r], [0] * len( @@ -1069,7 +1075,7 @@ def test_cpp_hom(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = 3 * pq.Hz - cpp_hom_eq = stg.cpp( + cpp_hom_eq = cpp( rate, amplitude_distribution, t_stop, t_start=t_start) self.assertTrue( @@ -1080,7 +1086,7 @@ def 
test_cpp_hom_errors(self): # testing empty amplitude self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[], t_stop=10 * 1000 * pq.ms, rate=3 * pq.Hz) @@ -1088,13 +1094,13 @@ def test_cpp_hom_errors(self): # testing sum of amplitude>1 self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[1, 1, 1], t_stop=10 * 1000 * pq.ms, rate=3 * pq.Hz) # testing negative value in the amplitude self.assertRaises( - ValueError, stg.cpp, amplitude_distribution=[-1, 1, 1], + ValueError, cpp, amplitude_distribution=[-1, 1, 1], t_stop=10 * 1000 * pq.ms, rate=3 * pq.Hz) # test negative rate @@ -1103,13 +1109,13 @@ def test_cpp_hom_errors(self): # Catches RuntimeWarning: invalid value encountered in sqrt # number = np.ceil(n + 3 * np.sqrt(n)), when `n` == -3 Hz. self.assertRaises( - ValueError, stg.cpp, amplitude_distribution=[0, 1, 0], + ValueError, cpp, amplitude_distribution=[0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=-3 * pq.Hz) # test wrong unit for rate self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=3 * 1000 * pq.ms) @@ -1117,19 +1123,19 @@ def test_cpp_hom_errors(self): # testing raises of AttributeError (missing input units) # Testing missing unit to t_stop self.assertRaises( - ValueError, stg.cpp, amplitude_distribution=[0, 1, 0], t_stop=10, + ValueError, cpp, amplitude_distribution=[0, 1, 0], t_stop=10, rate=3 * pq.Hz) # Testing missing unit to t_start self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=3 * pq.Hz, t_start=3) # testing rate missing unit self.assertRaises( - AttributeError, stg.cpp, amplitude_distribution=[0, 1, 0], + AttributeError, cpp, amplitude_distribution=[0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=3) @@ -1143,8 +1149,8 @@ def test_cpp_het(self): warnings.simplefilter("ignore") # Catch RuntimeWarning: divide by zero encountered in true_divide # mean_interval = 1 / rate.magnitude, when rate == 0 
Hz. - cpp_het = stg.cpp(rate, amplitude_distribution, - t_stop, t_start=t_start) + cpp_het = cpp(rate, amplitude_distribution, + t_stop, t_start=t_start) # testing the output formats self.format_check(cpp_het, amplitude_distribution, t_start, t_stop) self.assertEqual(len(cpp_het), len(rate)) @@ -1153,7 +1159,7 @@ def test_cpp_het(self): t_stop = 10000 * pq.ms t_start = 5 * 1000 * pq.ms rate = [3, 4] * pq.Hz - cpp_unit = stg.cpp( + cpp_unit = cpp( rate, amplitude_distribution, t_stop, t_start=t_start) self.assertEqual(cpp_unit[0].units, t_stop.units) @@ -1164,7 +1170,7 @@ def test_cpp_het(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = [3, 4] * pq.Hz - cpp_het_empty = stg.cpp( + cpp_het_empty = cpp( rate, amplitude_distribution, t_stop, t_start=t_start) self.assertEqual(len(cpp_het_empty[0]), 0) @@ -1174,7 +1180,7 @@ def test_cpp_het(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = [0, 0] * pq.Hz - cpp_het_empty_r = stg.cpp( + cpp_het_empty_r = cpp( rate, amplitude_distribution, t_stop, t_start=t_start) self.assertEqual( [len(train) for train in cpp_het_empty_r], [0] * len( @@ -1185,7 +1191,7 @@ def test_cpp_het(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = [3, 3] * pq.Hz - cpp_het_eq = stg.cpp( + cpp_het_eq = cpp( rate, amplitude_distribution, t_stop, t_start=t_start) self.assertTrue(np.allclose( @@ -1196,57 +1202,57 @@ def test_cpp_het_err(self): # testing empty amplitude self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * pq.Hz) # testing sum amplitude>1 self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[1, 1, 1], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * pq.Hz) # testing amplitude negative value self.assertRaises( - ValueError, stg.cpp, amplitude_distribution=[-1, 1, 1], + ValueError, cpp, amplitude_distribution=[-1, 1, 1], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * pq.Hz) # testing negative rate - 
self.assertRaises(ValueError, stg.cpp, amplitude_distribution=[ + self.assertRaises(ValueError, cpp, amplitude_distribution=[ 0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=[-3, 4] * pq.Hz) # testing empty rate self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=[] * pq.Hz) # testing empty amplitude self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * pq.Hz) # testing different len(A)-1 and len(rate) self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[0, 1], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * pq.Hz) # testing rate with different unit from Hz self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[0, 1], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * 1000 * pq.ms) # Testing analytical constrain between amplitude and rate self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[0, 0, 1], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * pq.Hz, @@ -1255,19 +1261,19 @@ def test_cpp_het_err(self): # testing raises of AttributeError (missing input units) # Testing missing unit to t_stop self.assertRaises( - ValueError, stg.cpp, amplitude_distribution=[0, 1, 0], t_stop=10, + ValueError, cpp, amplitude_distribution=[0, 1, 0], t_stop=10, rate=[3, 4] * pq.Hz) # Testing missing unit to t_start self.assertRaises( ValueError, - stg.cpp, + cpp, amplitude_distribution=[0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=[3, 4] * pq.Hz, t_start=3) # Testing missing unit to rate self.assertRaises( - AttributeError, stg.cpp, amplitude_distribution=[0, 1, 0], + AttributeError, cpp, amplitude_distribution=[0, 1, 0], t_stop=10 * 1000 * pq.ms, rate=[3, 4]) @@ -1277,7 +1283,7 @@ def test_cpp_jttered(self): t_stop = 10 * 1000 * pq.ms t_start = 5 * 1000 * pq.ms rate = 3 * pq.Hz - cpp_shift = stg.cpp( + cpp_shift = cpp( rate, amplitude_distribution, t_stop, diff --git a/elephant/unitary_event_analysis.py b/elephant/unitary_event_analysis.py 
index b9fdf1503..e93e6f81d 100644 --- a/elephant/unitary_event_analysis.py +++ b/elephant/unitary_event_analysis.py @@ -60,7 +60,7 @@ import scipy import elephant.conversion as conv -from elephant.utils import is_binary, deprecated_alias +from elephant.utils import is_binary __all__ = [ "hash_from_pattern", @@ -649,7 +649,6 @@ def _bintime(t, bin_size): return np.floor(np.array(t_dl) / bin_size_dl).astype(int) -@deprecated_alias(winsize='win_size', winstep='win_step') def _winpos(t_start, t_stop, win_size, win_step, position='left-edge'): """ Calculate the position of the analysis window. @@ -690,8 +689,6 @@ def _UE(mat, pattern_hash, method='analytic_TrialByTrial', n_surrogates=1): return Js, rate_avg, n_exp, n_emp, indices -@deprecated_alias(data='spiketrains', binsize='bin_size', winsize='win_size', - winstep='win_step', n_surr='n_surrogates') def jointJ_window_analysis(spiketrains, bin_size=5 * pq.ms, win_size=100 * pq.ms, win_step=5 * pq.ms, pattern_hash=None, method='analytic_TrialByTrial', From 11bd23ad81e7069f57c052982dd50f72d4de6b58 Mon Sep 17 00:00:00 2001 From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com> Date: Fri, 10 Nov 2023 18:43:51 +0100 Subject: [PATCH 3/4] Release 1.0.0 (#601) * bump version number to 1.0.0 * update .zenodo.json * update codemeta.json * fix typo * update authors * update authors * add release notes --- .zenodo.json | 12 +----------- codemeta.json | 24 ++---------------------- doc/release_notes.rst | 14 ++++++++++++++ elephant/VERSION | 2 +- 4 files changed, 18 insertions(+), 34 deletions(-) diff --git a/.zenodo.json b/.zenodo.json index 39c82453b..5f12a3a85 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -5,24 +5,14 @@ "affiliation": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany", "name": "Denker, Michael" }, - { - "orcid": "0000-0003-0503-5264", - 
"affiliation": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany", - "name": "Köhler, Cristiano" - }, { "orcid": "0000-0001-7292-1982", "affiliation": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany", "name": "Kern, Moritz" - }, - { - "orcid": "0000-0003-3776-4226", - "affiliation": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany", - "name": "Kleinjohann, Alexander" } ], - "title": "Elephant 0.14.0", + "title": "Elephant 1.0.0", "keywords": [ "neuroscience", diff --git a/codemeta.json b/codemeta.json index 25fe3450c..ac6628943 100644 --- a/codemeta.json +++ b/codemeta.json @@ -6,11 +6,11 @@ "contIntegration": "https://github.com/NeuralEnsemble/elephant/actions", "dateCreated": "2013-17-15", "datePublished": "2015-04-08", - "dateModified": "2023-10-31", + "dateModified": "2023-11-10", "downloadUrl": "https://files.pythonhosted.org/packages/aa/e5/a42131ffa1de8e379ba56d67c85824d2471e6fbedcf710283f589c0dd4a4/elephant-0.13.0.tar.gz", "issueTracker": "https://github.com/NeuralEnsemble/elephant/issues", "name": "Elephant", - "version": "0.14.0", + "version": "1.0.0", "identifier": "https://doi.org/10.5281/zenodo.1186602", "description": "Elephant (Electrophysiology Analysis Toolkit) is an open-source, community centered library for the analysis of electrophysiological data in the Python programming language. 
The focus of Elephant is on generic analysis functions for spike train data and time series recordings from electrodes, such as the local field potentials (LFP) or intracellular voltages.In addition to providing a common platform for analysis code from different laboratories, the Elephant project aims to provide a consistent and homogeneous analysis framework that is built on a modular foundation. \nElephant is the direct successor to Neurotools and maintains ties to complementary projects such as OpenElectrophy and spykeviewer.", "applicationCategory": "library", @@ -51,16 +51,6 @@ "name": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany" } }, - { - "@type": "Person", - "@id": "https://orcid.org/0000-0003-0503-5264", - "givenName": "Cristiano", - "familyName": "Köhler", - "affiliation": { - "@type": "Organization", - "name": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany" - } - }, { "@type": "Person", "@id": "https://orcid.org/0000-0001-7292-1982", @@ -70,16 +60,6 @@ "@type": "Organization", "name": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany" } - }, - { - "@type": "Person", - "@id": "https://orcid.org/0000-0003-3776-4226", - "givenName": "Alexander", - "familyName": "Kleinjohann", - "affiliation": { - "@type": "Organization", - "name": "Institute of Neuroscience and Medicine (INM-6) and Institute for Advanced Simulation (IAS-6) and JARA-Institute Brain Structure-Function Relationships (INM-10), Jülich Research Centre, Jülich, Germany" - } } ] } diff --git a/doc/release_notes.rst b/doc/release_notes.rst 
index fa30c79ad..e2fec7ffd 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -3,6 +3,20 @@ Release Notes ============= +Release 1.0.0 +============= +Elephant's first major release is focused on providing a stable and consistent API that will be maintained over the 1.x series of releases. In order to provide future support, this release will remove all features and API specifications that have been deprecated over the course of the last releases of the 0.x line. While work on the next generation of Elephant will commence, all new analysis capabilities will be consistently back-ported to become available in the 1.x release line. + +Breaking changes +---------------- +* Removed deprecated features and naming introduced in #316 with Elephant release v0.8.0 (#488). +* Removed the `pandas_bridge` module from Elephant in line with the deprecation plan introduced with Elephant v0.7.0 (#530). + +Selected dependency changes +--------------------------- +* Removed pandas from the dependencies (#530). 
+ + Elephant 0.14.0 release notes ============================= diff --git a/elephant/VERSION b/elephant/VERSION index ac32ff029..3eefcb9dd 100644 --- a/elephant/VERSION +++ b/elephant/VERSION @@ -1 +1 @@ -0.15.0b1 +1.0.0 From 77274f68dd5d69cfe5274bc3fd11c915501115ad Mon Sep 17 00:00:00 2001 From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com> Date: Fri, 10 Nov 2023 19:03:09 +0100 Subject: [PATCH 4/4] Housekeeepin release (#602) * bump version number to 1.0.0 * update .zenodo.json * update codemeta.json * fix typo * update authors * update authors * add release notes * typo codemeta.json --- codemeta.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/codemeta.json b/codemeta.json index ac6628943..6d10f256a 100644 --- a/codemeta.json +++ b/codemeta.json @@ -14,7 +14,7 @@ "identifier": "https://doi.org/10.5281/zenodo.1186602", "description": "Elephant (Electrophysiology Analysis Toolkit) is an open-source, community centered library for the analysis of electrophysiological data in the Python programming language. The focus of Elephant is on generic analysis functions for spike train data and time series recordings from electrodes, such as the local field potentials (LFP) or intracellular voltages.In addition to providing a common platform for analysis code from different laboratories, the Elephant project aims to provide a consistent and homogeneous analysis framework that is built on a modular foundation. \nElephant is the direct successor to Neurotools and maintains ties to complementary projects such as OpenElectrophy and spykeviewer.", "applicationCategory": "library", - "releaseNotes": "https://github.com/NeuralEnsemble/elephant/releases/tag/v0.14.0", + "releaseNotes": "https://github.com/NeuralEnsemble/elephant/releases/tag/v1.0.0", "funding": "EU Grant 604102 (HBP), EU Grant 720270(HBP), EU Grant 785907(HBP), EU Grant 945539(HBP)", "developmentStatus": "active", "keywords": [