adding compression parameter to evolve function
lantunes committed Apr 14, 2021
1 parent 1f9f324 commit 8fce8da
Showing 18 changed files with 117 additions and 74 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -16,7 +16,7 @@ pip install netomaton
```

Requirements for using this library are Python 3.6, numpy 1.15.4,
matplotlib 3.0.2, and networkx 2.5.
matplotlib 3.0.2, networkx 2.5, and msgpack 1.0.2.


### What are Network Automata?
6 changes: 5 additions & 1 deletion demos/fungal_growth/fungal_growth_demo.py
@@ -19,14 +19,18 @@
timesteps = 100
width = 200
height = 200
# for longer timeframes (e.g. 1000 timesteps) and more nodes, set this to True;
# it will take a little longer, but the memory footprint will be greatly reduced
compression = False

initial_conditions = ntm.init_simple2d(width, height, val=R_E, dtype=float)

model = ntm.FungalGrowthModel(R_E, width, height, initial_conditions, seed=20210408)

activities, _ = ntm.evolve(topology=model.topology, initial_conditions=initial_conditions, timesteps=timesteps,
activity_rule=model.activity_rule, connectivity_rule=model.connectivity_rule,
update_order=model.update_order, copy_connectivity=model.copy_connectivity)
update_order=model.update_order, copy_connectivity=model.copy_connectivity,
compression=compression)

activities_list = ntm.convert_activities_map_to_list(activities)
ntm.animate(activities_list, shape=(width, height), interval=200, colormap="jet")
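As an aside, here is a minimal sketch of how the new `compression` flag might be exercised outside this demo. The two-node connectivity map, the identity activity rule, and the timestep count are illustrative assumptions, not part of the commit:

```python
import netomaton as ntm

# a hypothetical two-node network, expressed as a connectivity map (node -> incoming links)
topology = {0: {1: [{}]}, 1: {0: [{}]}}
initial_conditions = [1, 0]

# with compression=True, each timestep's activities are stored msgpack-packed,
# trading a little CPU time for a much smaller memory footprint
activities, _ = ntm.evolve(topology=topology, initial_conditions=initial_conditions, timesteps=10,
                           activity_rule=lambda ctx: ctx.current_activity,
                           compression=True)

# compressed timesteps are expanded back to plain dicts with .to_dict()
last_t = max(activities)
print(activities[last_t].to_dict())
```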
@@ -13,7 +13,7 @@
adjacency_matrix = [[0 for _ in range(N)] for _ in range(N)] # begin with a fully disconnected network of size N

def connectivity_rule(cctx):
choices = np.random.choice([n for n in cctx.connectivity_map], size=2, replace=True)
choices = [int(i) for i in np.random.choice([n for n in cctx.connectivity_map], size=2, replace=True)]
cctx.connectivity_map[choices[0]][choices[1]] = [{}]
cctx.connectivity_map[choices[1]][choices[0]] = [{}]

@@ -17,7 +17,7 @@ def connectivity_rule(cctx):
num_nodes = len(cctx.connectivity_map)
new_label = num_nodes
cctx.connectivity_map[new_label] = {}
connect_to = np.random.choice(list(range(num_nodes)))
connect_to = int(np.random.choice(list(range(num_nodes))))
cctx.connectivity_map[connect_to][new_label] = [{}]
cctx.connectivity_map[new_label][connect_to] = [{}]

@@ -31,7 +31,7 @@ def connectivity_rule(cctx):
cctx.connectivity_map[new_label] = {new_label: [{}]}
if random.random() < delta:
# choose 2 nodes at random, without replacement
choices = np.random.choice(list(cctx.connectivity_map.keys()), size=2, replace=False)
choices = [int(i) for i in np.random.choice(list(cctx.connectivity_map.keys()), size=2, replace=False)]
cctx.connectivity_map[choices[0]][choices[1]] = [{}]
cctx.connectivity_map[choices[1]][choices[0]] = [{}]

2 changes: 2 additions & 0 deletions docs/index.md
@@ -185,6 +185,8 @@ The connectivity map provides a list of all the nodes in the network.
edge (i.e. for multi-edges) and the ID of the hyperedge the edge belongs to
-- this implies that, in some cases, the edges are stateful as well as nodes

- only Python objects are allowed in the connectivity map; no objects like NumPy numbers are allowed

# Time Evolution

- a node's state is called its activity
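To illustrate the new constraint noted in docs/index.md above, here is a sketch of coercing NumPy scalars to plain Python ints before using them as connectivity-map keys, mirroring the demo changes in this commit; the ten-node label set is an assumed example:

```python
import numpy as np

node_labels = list(range(10))

# np.random.choice returns NumPy scalar types (e.g. np.int64); the msgpack-based
# compression cannot serialize these as map keys, so coerce them to int first
choices = [int(i) for i in np.random.choice(node_labels, size=2, replace=False)]

connectivity_map = {label: {} for label in node_labels}
connectivity_map[choices[0]][choices[1]] = [{}]
connectivity_map[choices[1]][choices[0]] = [{}]
```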
155 changes: 93 additions & 62 deletions netomaton/evolution.py
@@ -1,8 +1,11 @@
import numpy as np
import scipy.sparse as sparse
import collections
import gc
from enum import Enum

import msgpack
import numpy as np
import scipy.sparse as sparse


class NodeContext(object):
"""
@@ -121,18 +124,19 @@ class UpdateOrder(Enum):

# TODO rename "connectivity" everywhere; to "topology" perhaps? as in "topological table"
def evolve(topology, initial_conditions=None, activity_rule=None, timesteps=None, input=None, connectivity_rule=None,
perturbation=None, past_conditions=None, update_order=UpdateOrder.ACTIVITIES_FIRST, copy_connectivity=True):
perturbation=None, past_conditions=None, update_order=UpdateOrder.ACTIVITIES_FIRST, copy_connectivity=True,
compression=False):

if initial_conditions is None:
initial_conditions = {}

# convert initial_conditions to map, if it isn't already
if not isinstance(initial_conditions, dict) and isinstance(initial_conditions, (list, np.ndarray)):
initial_conditions = {i: v for i, v in enumerate(initial_conditions)}
initial_conditions = {i: check_np(v) for i, v in enumerate(initial_conditions)}

# key is the timestep; value is a map of the network activities,
# where the key is the node label, and value is the activity
activities_over_time = {0: initial_conditions} #TODO should the first timestep start at 1?
activities_over_time = {0: Activities(initial_conditions, compression)}
input_fn, steps = _get_input_function(timesteps, input)

connectivity_map, was_adjacency_matrix = _get_connectivity_map(topology)
@@ -141,7 +145,10 @@ def evolve(topology, initial_conditions=None, activity_rule=None, timesteps=None
raise Exception("too few intial conditions specified [%s] for the number of given nodes [%s]" %
(len(initial_conditions), len(connectivity_map)))

connectivities_over_time = {0: connectivity_map}
connectivities_over_time = {0: Topology(connectivity_map, compression)}

prev_activities = initial_conditions
prev_connectivities = connectivity_map

t = 1
while True:
@@ -150,33 +157,37 @@ def evolve(topology, initial_conditions=None, activity_rule=None, timesteps=None
break

past = _get_past_activities(past_conditions, activities_over_time, t)
activities_over_time[t] = {}
connectivities_over_time[t] = {}
curr_activities = {}

if update_order is UpdateOrder.ACTIVITIES_FIRST:
added_nodes, removed_nodes = evolve_activities(activity_rule, t, inp, activities_over_time,
connectivities_over_time[t - 1], past, perturbation)
evolve_topology(connectivity_rule, t, activities_over_time[t], connectivities_over_time,
copy_connectivity, added_nodes, removed_nodes)
added_nodes, removed_nodes = evolve_activities(activity_rule, t, inp, curr_activities, prev_activities,
activities_over_time, prev_connectivities, past,
perturbation, compression)
curr_connectivities = evolve_topology(connectivity_rule, t, curr_activities, prev_connectivities,
connectivities_over_time, copy_connectivity, compression,
added_nodes, removed_nodes)

elif update_order is UpdateOrder.TOPOLOGY_FIRST:
evolve_topology(connectivity_rule, t, activities_over_time[t - 1],
connectivities_over_time, copy_connectivity)
curr_connectivities = evolve_topology(connectivity_rule, t, prev_activities, prev_connectivities,
connectivities_over_time, copy_connectivity, compression)
# added and removed nodes are ignored in this case
evolve_activities(activity_rule, t, inp, activities_over_time, connectivities_over_time[t], past,
perturbation)
evolve_activities(activity_rule, t, inp, curr_activities, prev_activities,
activities_over_time, curr_connectivities, past, perturbation, compression)

elif update_order is UpdateOrder.SYNCHRONOUS:
# TODO create test
evolve_topology(connectivity_rule, t, activities_over_time[t - 1],
connectivities_over_time, copy_connectivity)
curr_connectivities = evolve_topology(connectivity_rule, t, prev_activities, prev_connectivities,
connectivities_over_time, copy_connectivity, compression)
# added and removed nodes are ignored in this case
evolve_activities(activity_rule, t, inp, activities_over_time, connectivities_over_time[t - 1], past,
perturbation)
evolve_activities(activity_rule, t, inp, curr_activities, prev_activities,
activities_over_time, prev_connectivities, past, perturbation, compression)

else:
raise Exception("unsupported update_order: %s" % update_order)

prev_activities = curr_activities
prev_connectivities = curr_connectivities

t += 1

if was_adjacency_matrix:
@@ -188,54 +199,56 @@ def evolve(topology, initial_conditions=None, activity_rule=None, timesteps=None
return activities_over_time, connectivities_over_time


def evolve_activities(activity_rule, t, inp, activities_over_time, connectivity_map, past, perturbation):
def evolve_activities(activity_rule, t, inp, curr_activities, prev_activities, activities_over_time, connectivity_map,
past, perturbation, compression):
added_nodes = []
removed_nodes = []
if activity_rule:
added_nodes, removed_nodes = do_activity_rule(t, inp, activities_over_time, connectivity_map,
added_nodes, removed_nodes = do_activity_rule(t, inp, curr_activities, prev_activities, connectivity_map,
activity_rule, past, perturbation)

if added_nodes:
for state, outgoing_links, node_label in added_nodes:
activities_over_time[t][node_label] = state
curr_activities[node_label] = state

activities_over_time[t] = Activities(curr_activities, compression)

return added_nodes, removed_nodes


def do_activity_rule(t, inp, activities_over_time, connectivity_map, activity_rule, past, perturbation):
def do_activity_rule(t, inp, curr_activities, prev_activities, connectivity_map, activity_rule, past, perturbation):
added_nodes = []
removed_nodes = []

last_activities = activities_over_time[t - 1]

for node_label, incoming_connections in connectivity_map.items():
neighbour_labels = [k for k in incoming_connections]
current_activity = last_activities[node_label]
neighbourhood_activities = [last_activities[neighbour_label] for neighbour_label in neighbour_labels]
current_activity = prev_activities[node_label]
neighbourhood_activities = [prev_activities[neighbour_label] for neighbour_label in neighbour_labels]
node_in = None if inp == "__timestep__" else inp[node_label] if _is_indexable(inp) else inp
ctx = NodeContext(node_label, t, last_activities, neighbour_labels, neighbourhood_activities,
ctx = NodeContext(node_label, t, prev_activities, neighbour_labels, neighbourhood_activities,
incoming_connections, current_activity, past, node_in)

new_activity = activity_rule(ctx)
new_activity = check_np(new_activity)

if ctx.added_nodes:
added_nodes.extend(ctx.added_nodes)
if ctx.removed_nodes:
removed_nodes.extend(ctx.removed_nodes)

if node_label not in ctx.removed_nodes:
activities_over_time[t][node_label] = new_activity
curr_activities[node_label] = new_activity

if perturbation is not None:
pctx = PerturbationContext(node_label, activities_over_time[t][node_label], t, node_in)
activities_over_time[t][node_label] = perturbation(pctx)
pctx = PerturbationContext(node_label, curr_activities[node_label], t, node_in)
curr_activities[node_label] = perturbation(pctx)

return added_nodes, removed_nodes


def evolve_topology(connectivity_rule, t, activities, connectivities_over_time, copy_connectivity,
added_nodes=None, removed_nodes=None):
connectivity_map = connectivities_over_time[t - 1]
def evolve_topology(connectivity_rule, t, activities, prev_connectivities, connectivities_over_time, copy_connectivity,
compression, added_nodes=None, removed_nodes=None):
connectivity_map = prev_connectivities
if added_nodes or removed_nodes:
connectivity_map = copy_connectivity_map(connectivity_map)

@@ -255,37 +268,19 @@ def evolve_topology(connectivity_rule, t, activities, connectivities_over_time,
if not connectivity_map:
raise Exception("connectivity rule must return a connectivity map")

connectivities_over_time[t] = connectivity_map
connectivities_over_time[t] = Topology(connectivity_map, compression)

return connectivity_map


def copy_connectivity_map(conn_map):
new_map = type(conn_map)()
for k1, v1 in conn_map.items():
new_links = type(v1)()
for k2, v2 in v1.items():
new_links[k2] = _deep_copy_value(v2)
new_map[k1] = new_links
return new_map


def _deep_copy_value(val):
if isinstance(val, (list, tuple)):
new_val = []
for v in val:
new_val.append(_deep_copy_value(v))
val = tuple(new_val) if isinstance(val, tuple) else new_val
elif isinstance(val, (dict, collections.OrderedDict)):
new_val = type(val)()
for k, v in val.items():
new_val[k] = _deep_copy_value(v)
val = new_val
return val
return _CompressedDict(conn_map, True).to_dict()


def convert_activities_map_to_list(activities_map_over_time):
activities_over_time = []
for timestep in sorted(activities_map_over_time): #TODO do we need to sort?
activities = activities_map_over_time[timestep]
for i in activities_map_over_time:
activities = activities_map_over_time[i].to_dict()
activities_list = []
for c in sorted(activities): #TODO do we need to sort?
activities_list.append(activities[c])
@@ -295,8 +290,8 @@ def convert_activities_map_to_list(activities_map_over_time):

def convert_connectivities_map_to_list(connectivities_map_over_time):
connectivities_over_time = []
for timestep in sorted(connectivities_map_over_time): #TODO do we need to sort?
connectivities = connectivities_map_over_time[timestep]
for i in connectivities_map_over_time:
connectivities = connectivities_map_over_time[i].to_dict()
num_nodes = len(connectivities)
adjacency_matrix = [[0. for _ in range(num_nodes)] for _ in range(num_nodes)]
for c in sorted(connectivities): #TODO do we need to sort?
@@ -349,7 +344,7 @@ def _get_past_activities(past_conditions, activities_over_time, t):
last_t = t - 1
for i in range(len(past_conditions)-1, -1, -1):

curr_past_cond = past_conditions[last_t-1] if last_t < 1 else activities_over_time[last_t-1]
curr_past_cond = past_conditions[last_t-1] if last_t < 1 else activities_over_time[last_t-1].to_dict()
if not isinstance(curr_past_cond, dict) and isinstance(curr_past_cond, (list, np.ndarray)):
curr_past_cond = {i: v for i, v in enumerate(curr_past_cond)}

@@ -448,3 +443,39 @@ def init_simple2d(rows, cols, val=1, dtype=np.int):
x = np.zeros((rows, cols), dtype=dtype)
x[x.shape[0]//2][x.shape[1]//2] = val
return np.array(x).reshape(rows * cols).tolist()


class _CompressedDict:
__slots__ = ("_map", "_compression")

def __init__(self, m, compression):
self._compression = compression
self._map = self._compress(m) if compression else m

def to_dict(self):
return self._decompress(self._map) if self._compression else self._map

def _compress(self, d):
return msgpack.packb(d)

def _decompress(self, b):
gc.disable()
d = msgpack.unpackb(b, strict_map_key=False)
gc.enable()
return d


class Topology(_CompressedDict):
def __init__(self, connectivity_map, compression):
super().__init__(connectivity_map, compression)


class Activities(_CompressedDict):
def __init__(self, activities, compression):
super().__init__(activities, compression)


def check_np(obj):
if isinstance(obj, np.generic):
return np.asscalar(obj)
return obj
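For reference, a sketch of the msgpack round-trip that `_CompressedDict` relies on, using an assumed toy activities map; `strict_map_key=False` is required because the node labels are integers rather than strings:

```python
import msgpack

activities = {0: 1.0, 1: 0.5, 2: 0.0}

packed = msgpack.packb(activities)                        # compact bytes representation
restored = msgpack.unpackb(packed, strict_map_key=False)  # integer map keys need strict_map_key=False

assert restored == activities
```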
2 changes: 1 addition & 1 deletion netomaton/substitution_system.py
@@ -68,6 +68,6 @@ def connectivity_map(self):
return self._connectivity_map

def pad(self, activities):
activities = [[activities[k][e] for e in sorted(activities[k])] for k in sorted(activities)]
activities = [[v for e, v in sorted(activities[k].to_dict().items())] for k in sorted(activities)]
max_len = np.max([len(a) for a in activities])
return np.asarray([np.pad(a, (0, max_len - len(a)), 'constant', constant_values=0) for a in activities])
1 change: 1 addition & 0 deletions netomaton/utils.py
@@ -190,6 +190,7 @@ def update(connectivity_map):

def connectivity_map_to_nx(connectivity_map):
G = nx.MultiDiGraph()
connectivity_map = connectivity_map.to_dict()
for node in connectivity_map:
G.add_node(node)
for from_node, connection_state in connectivity_map[node].items():
2 changes: 1 addition & 1 deletion netomaton/wolfram_physics_model.py
@@ -154,5 +154,5 @@ def _matches(self, rule, relation, symbol_bindings):
def to_configurations(self, connectivities_over_time):
configs = []
for timestep in connectivities_over_time:
configs.append(self.connectivity_map_to_config(connectivities_over_time[timestep]))
configs.append(self.connectivity_map_to_config(connectivities_over_time[timestep].to_dict()))
return configs
3 changes: 2 additions & 1 deletion netomaton_dev.yaml
@@ -9,4 +9,5 @@ dependencies:
- matplotlib=3.0.2
- networkx=2.5
- numpy=1.15.4
- mkdocs=1.1.2
- mkdocs=1.1.2
- msgpack-python=1.0.2
1 change: 1 addition & 0 deletions requirements.txt
@@ -2,3 +2,4 @@ scipy==1.3.1
matplotlib==3.0.2
networkx==2.5
numpy==1.15.4
msgpack==1.0.2
2 changes: 1 addition & 1 deletion setup.py
@@ -23,4 +23,4 @@
packages=packages,
keywords=["network automata", "cellular automata", "complexity", "complex systems", "computation", "non-linear dynamics"],
python_requires='>3.5.2',
install_requires=["numpy >= 1.15.4", "matplotlib >= 3.0.2", "networkx == 2.2", "scipy == 1.3.1"])
install_requires=["numpy >= 1.15.4", "matplotlib >= 3.0.2", "networkx == 2.2", "scipy == 1.3.1", "msgpack == 1.0.2"])
2 changes: 2 additions & 0 deletions tests/test_fungal_growth_model.py
@@ -26,6 +26,7 @@ def test_fungal_growth(self):

expected = self._convert_from_literal("fungal_growth_model.txt")

c = {i: t.to_dict() for i, t in c.items()}
self.assertEqual(expected, c)

def test_fungal_growth_with_resource_layer(self):
@@ -53,4 +54,5 @@ def test_fungal_growth_with_resource_layer(self):

expected = self._convert_from_literal("fungal_growth_model.txt")

c = {i: t.to_dict() for i, t in c.items()}
self.assertEqual(expected, c)