Merge branch 'release/0.4.0'
wmayner committed Feb 24, 2015
2 parents 45e2c0d + 922f62a commit 61e15f2
Showing 13 changed files with 303 additions and 146 deletions.
18 changes: 8 additions & 10 deletions INSTALLATION.md
@@ -41,10 +41,9 @@ another, so that they don't interact in unexpected ways. Please see [this
guide](http://docs.python-guide.org/en/latest/dev/virtualenvs/) for more
information.

To do this, you must install `virtualenv` and `virtualenvwrapper`, a [tool for
manipulating virtual
environments](http://virtualenvwrapper.readthedocs.org/en/latest/). Both of
those tools are available on [PyPI](https://pypi.python.org/pypi), the Python
To do this, you must install `virtualenvwrapper`, a [tool for manipulating
virtual environments](http://virtualenvwrapper.readthedocs.org/en/latest/).
This tool is available on [PyPI](https://pypi.python.org/pypi), the Python
package index, and can be installed with `pip`, the command-line utility for
installing and managing Python packages (`pip` was installed automatically with
the brewed Python):
@@ -73,8 +72,8 @@ development project directories, and the location of the script installed with
this package, respectively. **Note:** The location of the script can be found
by running `which virtualenvwrapper.sh`.

The filepath after the equals sign on the second line will be different for
everyone, but here is an example:

```bash
export WORKON_HOME=$HOME/.virtualenvs
@@ -93,7 +92,7 @@ virtual environment, like so:
mkvirtualenv -p `which python3` <name_of_your_project>
```

The option ``-p `which python3` `` ensures that when the virtual environment
is activated, the commands `python` and `pip` will refer to their Python 3
counterparts.

@@ -124,6 +123,5 @@ import pyphi
```

Please see the documentation for some
[examples](http://pythonhosted.org/pyphi/#usage-and-examples) and information
on how to [configure](http://pythonhosted.org/pyphi/#configuration-optional)
it.
[examples](http://pythonhosted.org/pyphi/#examples) and information on how to
[configure](http://pythonhosted.org/pyphi/#configuration) it.
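
For reference, here is a minimal usage sketch assembled from pieces that appear elsewhere in this commit: the example helpers referenced in docs/examples/basic.rst and the `big_phi` function in pyphi/compute.py. It is an illustrative sketch, not part of the installation guide; the linked documentation has the authoritative examples.

```python
import pyphi                        # verify that the package imports
from pyphi import compute, examples

# The example subsystem referenced in docs/examples/basic.rst.
subsystem = examples.basic_subsystem()

# Compute big Phi for it (see big_phi() in pyphi/compute.py below).
print(compute.big_phi(subsystem))
```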
2 changes: 0 additions & 2 deletions docs/examples/basic.rst
@@ -67,8 +67,6 @@ The documentation for :mod:`pyphi.models` contains descriptions of these
structures.

.. note::

The network and subsystem discussed here are returned by the
:func:`pyphi.examples.basic_network` and
:func:`pyphi.examples.basic_subsystem` functions.

3 changes: 0 additions & 3 deletions docs/examples/conventions.rst
@@ -57,20 +57,17 @@ Low Index nodes. The other convention, where the highest-index node varies the
fastest, is similarly called **HOLI**.

.. note::

The rationale for this choice of convention is that the **LOLI** mapping is
stable under changes in the number of nodes, in the sense that the same bit
always corresponds to the same node index. The **HOLI** mapping does not
have this property.

.. note::

This applies only to situations where decimal indices are encoding states.
Whenever a network state is represented as a list or tuple, we use the only
sensible convention: the |ith| element gives the state of the |ith| node.

.. note::

There are various conversion functions available for converting between
TPMs, states, and indices using different conventions: see the
:mod:`pyphi.convert` module.
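
To make the **LOLI** convention concrete, here is a small plain-Python sketch (an illustration only, not pyphi's own conversion code; the functions in :mod:`pyphi.convert` are the ones to use in practice):

```python
def index_to_state(i, number_of_nodes):
    """Decode a decimal state index under the LOLI convention: bit n of
    the index gives the state of node n, so the lowest-order
    (fastest-varying) bit belongs to the lowest-index node."""
    return tuple((i >> n) & 1 for n in range(number_of_nodes))

# With three nodes, index 1 turns on only node 0, and index 4 only node 2.
assert index_to_state(1, 3) == (1, 0, 0)
assert index_to_state(4, 3) == (0, 0, 1)
# The same bit keeps meaning the same node as the network grows, which is
# the stability property described in the first note above.
assert index_to_state(1, 4) == (1, 0, 0, 0)
```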
2 changes: 1 addition & 1 deletion pyphi/__init__.py
@@ -57,7 +57,7 @@
"""

__title__ = 'pyphi'
__version__ = '0.3.8'
__version__ = '0.4.0'
__description__ = 'Python library for computing integrated information.'
__author__ = 'Will Mayner'
__author_email__ = '[email protected]'
181 changes: 134 additions & 47 deletions pyphi/compute.py
@@ -11,7 +11,7 @@
import functools
from time import time
import numpy as np
from joblib import Parallel, delayed
import multiprocessing
from scipy.sparse.csgraph import connected_components
from scipy.sparse import csr_matrix

@@ -47,6 +47,7 @@ def concept(subsystem, mechanism):
:mod:`pyphi.constants`.
"""
start = time()

def time_annotated(concept):
concept.time = time() - start
return concept
@@ -199,12 +200,12 @@ def conceptual_information(subsystem):

# TODO document
def _null_mip(subsystem):
"""Returns a BigMip with zero phi and empty constellations.
"""Returns a BigMip with zero Phi and empty constellations.
This is the MIP associated with a reducible subsystem."""
return BigMip(subsystem=subsystem, cut_subsystem=subsystem,
phi=0.0,
unpartitioned_constellation=[], partitioned_constellation=[])
unpartitioned_constellation=(), partitioned_constellation=())


def _single_node_mip(subsystem):
@@ -215,8 +216,8 @@ def _single_node_mip(subsystem):
# TODO return the actual concept
return BigMip(
phi=0.5,
unpartitioned_constellation=None,
partitioned_constellation=None,
unpartitioned_constellation=(),
partitioned_constellation=(),
subsystem=subsystem,
cut_subsystem=subsystem)
else:
@@ -265,6 +266,86 @@ def _evaluate_partition(uncut_subsystem, partition,
return min(forward_mip, backward_mip)


# Wrapper for _evaluate_partition for parallel processing.
def _eval_wrapper(in_queue, out_queue, subsystem, unpartitioned_constellation):
while True:
partition = in_queue.get()
if partition is None:
break
new_mip = _evaluate_partition(subsystem, partition,
unpartitioned_constellation)
out_queue.put(new_mip)
out_queue.put(None)


def _find_mip_parallel(subsystem, bipartitions, unpartitioned_constellation,
min_mip):
"""Parallel loop over all partitions, using the specified number of
cores."""
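# Note on config.NUMBER_OF_CORES: a positive value is used directly (it may
# not exceed the number of cores), while a negative value counts back from
# the total, via cpu_count() + NUMBER_OF_CORES + 1: on an 8-core machine,
# -1 yields 8 worker processes and -2 yields 7.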
if config.NUMBER_OF_CORES < 0:
number_of_processes = (multiprocessing.cpu_count() +
config.NUMBER_OF_CORES + 1)
elif config.NUMBER_OF_CORES <= multiprocessing.cpu_count():
number_of_processes = config.NUMBER_OF_CORES
else:
raise ValueError(
'Invalid number of cores; value may not be 0, and must be less '
'than the number of cores ({} for this '
'system).'.format(multiprocessing.cpu_count()))
# Define input and output queues to allow short-circuiting if a cut is found
# with zero Phi. Load the input queue with all possible cuts and a 'poison
# pill' for each process.
in_queue = multiprocessing.Queue()
out_queue = multiprocessing.Queue()
for partition in bipartitions:
in_queue.put(partition)
for i in range(number_of_processes):
in_queue.put(None)
# Initialize the processes and start them.
processes = [
multiprocessing.Process(target=_eval_wrapper,
args=(in_queue, out_queue, subsystem,
unpartitioned_constellation))
for i in range(number_of_processes)
]
for i in range(number_of_processes):
processes[i].start()
# Keep consuming the output queue until every process has signalled completion
# with its poison pill, or short-circuit as soon as a cut with zero Phi is found.
while True:
new_mip = out_queue.get()
if new_mip is None:
number_of_processes -= 1
if number_of_processes == 0:
break
elif utils.phi_eq(new_mip.phi, 0):
min_mip = new_mip
for process in processes:
process.terminate()
break
else:
if new_mip < min_mip:
min_mip = new_mip
return min_mip
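
As an aside, the queue-and-poison-pill mechanism used above can be exercised on its own. The following stripped-down sketch (not pyphi code; the worker and data are made up for illustration) shows the same pattern: workers drain a shared input queue until they see `None`, and the consumer counts the returned pills to know when every worker has finished.

```python
import multiprocessing


def _square_worker(in_queue, out_queue):
    # Pull work items until the poison pill (None) arrives.
    while True:
        item = in_queue.get()
        if item is None:
            break
        out_queue.put(item * item)
    out_queue.put(None)  # Signal the consumer that this worker is done.


if __name__ == '__main__':
    in_queue, out_queue = multiprocessing.Queue(), multiprocessing.Queue()
    number_of_processes = 2
    for item in [3, 1, 4, 1, 5]:
        in_queue.put(item)
    for _ in range(number_of_processes):
        in_queue.put(None)
    processes = [multiprocessing.Process(target=_square_worker,
                                         args=(in_queue, out_queue))
                 for _ in range(number_of_processes)]
    for process in processes:
        process.start()
    results = []
    while number_of_processes:
        result = out_queue.get()
        if result is None:
            number_of_processes -= 1
        else:
            results.append(result)
    for process in processes:
        process.join()
    print(sorted(results))  # [1, 1, 9, 16, 25]
```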


def _find_mip_sequential(subsystem, bipartitions, unpartitioned_constellation,
min_mip):
"""Sequentially loop over all partitions, holding only two BigMips in
memory at once."""
for i, partition in enumerate(bipartitions):
new_mip = _evaluate_partition(
subsystem, partition, unpartitioned_constellation)
log.debug("Finished {} of {} partitions.".format(
i + 1, len(bipartitions)))
if new_mip < min_mip:
min_mip = new_mip
# Short-circuit as soon as we find a MIP with effectively 0 phi.
if not min_mip:
break
return min_mip


# TODO document big_mip
@memory.cache(ignore=["subsystem"])
def _big_mip(cache_key, subsystem):
@@ -317,32 +398,15 @@ def time_annotated(big_mip, small_phi_time=0.0):
unpartitioned_constellation = constellation(subsystem)
small_phi_time = time() - small_phi_start
log.debug("Found unpartitioned constellation.")

min_mip = _null_mip(subsystem)
min_mip.phi = float('inf')
if config.PARALLEL_CUT_EVALUATION:
# Parallel loop over all partitions, using the specified number of
# cores.
mip_candidates = Parallel(n_jobs=(config.NUMBER_OF_CORES),
verbose=config.PARALLEL_VERBOSITY)(
delayed(_evaluate_partition)(subsystem, partition,
unpartitioned_constellation)
for partition in bipartitions)
return time_annotated(min(mip_candidates), small_phi_time)
min_mip = _find_mip_parallel(subsystem, bipartitions,
unpartitioned_constellation, min_mip)
else:
# Sequentially loop over all partitions, holding only two BigMips in
# memory at once.
min_mip = _null_mip(subsystem)
min_mip.phi = float('inf')
for i, partition in enumerate(bipartitions):
new_mip = _evaluate_partition(
subsystem, partition, unpartitioned_constellation)
log.debug("Finished {} of {} partitions.".format(
i + 1, len(bipartitions)))
if new_mip < min_mip:
min_mip = new_mip
# Short-circuit as soon as we find a MIP with effectively 0 phi.
if not min_mip:
break
return time_annotated(min_mip, small_phi_time)
min_mip = _find_mip_sequential(subsystem, bipartitions,
unpartitioned_constellation, min_mip)
result = time_annotated(min_mip, small_phi_time)

log.info("Finished calculating big-phi data for {}.".format(subsystem))
log.debug("RESULT: \n" + str(result))
@@ -373,34 +437,57 @@ def big_phi(subsystem):
return big_mip(subsystem).phi


def main_complex(network):
"""Return the main complex of the network."""
if not isinstance(network, Network):
raise ValueError(
"""Input must be a Network (perhaps you passed a Subsystem
instead?)""")
log.info("Calculating main complex for " + str(network) + "...")
result = max(complexes(network))
log.info("Finished calculating main complex for" + str(network) + ".")
log.debug("RESULT: \n" + str(result))
return result
def possible_main_complexes(network):
""""Return a generator of the subsystems of a network that could be a main
complex.
This is the just powerset of the nodes that have at least one input and
output (nodes with no inputs or no outputs cannot be part of a main
complex, because they do not have a causal link with the rest of the
subsystem in the past or future, respectively)."""
inputs = np.sum(network.connectivity_matrix, 1)
outputs = np.sum(network.connectivity_matrix, 0)
nodes_have_inputs_and_outputs = np.logical_and(inputs > 0, outputs > 0)
causally_significant_nodes = np.where(nodes_have_inputs_and_outputs)[0]
for subset in utils.powerset(causally_significant_nodes):
yield Subsystem(subset, network)
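
As a quick illustration of this input/output filter (a made-up connectivity matrix, not pyphi code), using the convention noted in pyphi/examples.py that `CM[i][j] = 1` means node `i` is connected to node `j`:

```python
import numpy as np

# A 3-node connectivity matrix in which node 2 never sends any output.
cm = np.array([[0, 1, 1],
               [1, 0, 1],
               [0, 0, 0]])

has_output = np.sum(cm, 1) > 0   # out-degree per node: [2, 2, 0]
has_input = np.sum(cm, 0) > 0    # in-degree per node:  [1, 1, 2]

# Only nodes with at least one input and one output can belong to a main
# complex, so node 2 is excluded.
print(np.where(np.logical_and(has_input, has_output))[0])  # [0 1]
```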

def subsystems(network):
"""Return a generator of all possible subsystems of a network.

This is just the powerset of the network's set of nodes."""
for subset in utils.powerset(range(network.size)):
def subsystems(network):
"""Return a generator of all possible subsystems of a network."""
for subset in utils.powerset(network.node_indices):
yield Subsystem(subset, network)


def complexes(network):
"""Return a generator for all complexes of the network.
"""Return a generator for all irreducible complexes of the network."""
if not isinstance(network, Network):
raise ValueError(
"""Input must be a Network (perhaps you passed a Subsystem
instead?)""")
return tuple(filter(None, (big_mip(subsystem) for subsystem in
possible_main_complexes(network))))
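# Note: filter(None, ...) keeps only truthy BigMips, so reducible subsystems
# (those whose BigMip has effectively zero Phi; see BigMip.__bool__ in
# pyphi/models.py below) are dropped from the result.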


This includes reducible, zero-phi complexes (which are not, strictly
speaking, complexes at all)."""
def all_complexes(network):
"""Return a generator for all complexes of the network, including
reducible, zero-phi complexes (which are not, strictly speaking, complexes
at all)."""
if not isinstance(network, Network):
raise ValueError(
"""Input must be a Network (perhaps you passed a Subsystem
instead?)""")
return (big_mip(subsystem) for subsystem in subsystems(network))


def main_complex(network):
"""Return the main complex of the network."""
if not isinstance(network, Network):
raise ValueError(
"""Input must be a Network (perhaps you passed a Subsystem
instead?)""")
log.info("Calculating main complex for " + str(network) + "...")
result = max(complexes(network))
log.info("Finished calculating main complex for" + str(network) + ".")
log.debug("RESULT: \n" + str(result))
return result
1 change: 0 additions & 1 deletion pyphi/examples.py
@@ -64,7 +64,6 @@ def basic_network():
+---+---+---+---+
.. note::
|CM[i][j] = 1| means that node |i| is connected to node |j|.
"""
tpm = np.array([
17 changes: 10 additions & 7 deletions pyphi/models.py
@@ -224,7 +224,7 @@ def __eq__(self, other):
def __bool__(self):
"""A Mip is truthy if it is not reducible; i.e. if it has a significant
amount of |small_phi|."""
return self.phi > constants.EPSILON
return not utils.phi_eq(self.phi, 0)

def __hash__(self):
return hash((self.phi, self.direction, self.mechanism, self.purview,
@@ -414,14 +414,17 @@ def __str__(self):
def __bool__(self):
"""A Concept is truthy if it is not reducible; i.e. if it has a
significant amount of |big_phi|."""
return self.phi > constants.EPSILON
return not utils.phi_eq(self.phi, 0)

def eq_repertoires(self, other):
"""Return whether this concept has the same cause and effect
repertoires as another."""
if self.subsystem.network != other.subsystem.network:
raise Exception("Can't compare repertoires of concepts from "
"different networks.")
repertoires as another.
.. warning::
This only checks if the cause and effect repertoires are equal as
arrays; mechanisms, purviews, or even the nodes that the node indices
refer to might be different.
"""
return (
np.array_equal(self.cause.repertoire, other.cause.repertoire) and
np.array_equal(self.effect.repertoire, other.effect.repertoire))
@@ -545,7 +548,7 @@ def __eq__(self, other):
def __bool__(self):
"""A BigMip is truthy if it is not reducible; i.e. if it has a
significant amount of |big_phi|."""
return self.phi >= constants.EPSILON
return not utils.phi_eq(self.phi, 0)
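
These `__bool__` methods now all defer to `utils.phi_eq` instead of comparing against `constants.EPSILON` directly. A hypothetical sketch of the idea (the real `utils.phi_eq` and the precision value may differ):

```python
EPSILON = 1e-6  # hypothetical precision; pyphi defines its own in constants


def phi_eq(x, y):
    """Return whether two phi values are equal to within EPSILON."""
    return abs(x - y) <= EPSILON


# A MIP whose phi is only floating-point noise is treated as reducible
# (falsy), while any significant phi makes it truthy.
assert phi_eq(1e-9, 0)
assert not phi_eq(0.25, 0)
```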

def __hash__(self):
return hash((self.phi, self.unpartitioned_constellation,
