networkx-1.8.1/0000775000175000017500000000000012177457361013277 5ustar aricaric00000000000000networkx-1.8.1/setup.cfg0000664000175000017500000000012112177456333015110 0ustar aricaric00000000000000[nosetests] verbosity=0 detailed-errors=1 with-doctest=1 doctest-extension=txt networkx-1.8.1/networkx/0000775000175000017500000000000012177457361015160 5ustar aricaric00000000000000networkx-1.8.1/networkx/utils/0000775000175000017500000000000012177457361016320 5ustar aricaric00000000000000networkx-1.8.1/networkx/utils/decorators.py0000664000175000017500000002061612177456333021042 0ustar aricaric00000000000000import sys from collections import defaultdict from os.path import splitext import networkx as nx from networkx.external.decorator import decorator from networkx.utils import is_string_like def not_implemented_for(*graph_types): """Decorator to mark algorithms as not implemented Parameters ---------- graph_types : container of strings Entries must be one of 'directed','undirected', 'multigraph', 'graph'. Returns ------- _require : function The decorated function. Raises ------ NetworkXNotImplemnted If any of the packages cannot be imported Notes ----- Multiple types are joined logically with "and". For "or" use multiple @not_implemented_for() lines. Examples -------- Decorate functions like this:: @not_implemnted_for('directed') def sp_function(): pass @not_implemnted_for('directed','multigraph') def sp_np_function(): pass """ @decorator def _not_implemented_for(f,*args,**kwargs): graph = args[0] terms= {'directed':graph.is_directed(), 'undirected':not graph.is_directed(), 'multigraph':graph.is_multigraph(), 'graph':not graph.is_multigraph()} match = True try: for t in graph_types: match = match and terms[t] except KeyError: raise KeyError('use one or more of ', 'directed, undirected, multigraph, graph') if match: raise nx.NetworkXNotImplemented('not implemented for %s type'% ' '.join(graph_types)) else: return f(*args,**kwargs) return _not_implemented_for def require(*packages): """Decorator to check whether specific packages can be imported. If a package cannot be imported, then NetworkXError is raised. If all packages can be imported, then the original function is called. Parameters ---------- packages : container of strings Container of module names that will be imported. Returns ------- _require : function The decorated function. Raises ------ NetworkXError If any of the packages cannot be imported Examples -------- Decorate functions like this:: @require('scipy') def sp_function(): import scipy pass @require('numpy','scipy') def sp_np_function(): import numpy import scipy pass """ @decorator def _require(f,*args,**kwargs): for package in reversed(packages): try: __import__(package) except: msg = "{0} requires {1}" raise nx.NetworkXError( msg.format(f.__name__, package) ) return f(*args,**kwargs) return _require def _open_gz(path, mode): import gzip return gzip.open(path,mode=mode) def _open_bz2(path, mode): import bz2 return bz2.BZ2File(path,mode=mode) # To handle new extensions, define a function accepting a `path` and `mode`. # Then add the extension to _dispatch_dict. _dispatch_dict = defaultdict(lambda : open) _dispatch_dict['.gz'] = _open_gz _dispatch_dict['.bz2'] = _open_bz2 _dispatch_dict['.gzip'] = _open_gz def open_file(path_arg, mode='r'): """Decorator to ensure clean opening and closing of files. Parameters ---------- path_arg : int Location of the path argument in args. 
Even if the argument is a named positional argument (with a default value), you must specify its index as a positional argument. mode : str String for opening mode. Returns ------- _open_file : function Function which cleanly executes the io. Examples -------- Decorate functions like this:: @open_file(0,'r') def read_function(pathname): pass @open_file(1,'w') def write_function(G,pathname): pass @open_file(1,'w') def write_function(G, pathname='graph.dot') pass @open_file('path', 'w+') def another_function(arg, **kwargs): path = kwargs['path'] pass """ # Note that this decorator solves the problem when a path argument is # specified as a string, but it does not handle the situation when the # function wants to accept a default of None (and then handle it). # Here is an example: # # @open_file('path') # def some_function(arg1, arg2, path=None): # if path is None: # fobj = tempfile.NamedTemporaryFile(delete=False) # close_fobj = True # else: # # `path` could have been a string or file object or something # # similar. In any event, the decorator has given us a file object # # and it will close it for us, if it should. # fobj = path # close_fobj = False # # try: # fobj.write('blah') # finally: # if close_fobj: # fobj.close() # # Normally, we'd want to use "with" to ensure that fobj gets closed. # However, recall that the decorator will make `path` a file object for # us, and using "with" would undesirably close that file object. Instead, # you use a try block, as shown above. When we exit the function, fobj will # be closed, if it should be, by the decorator. @decorator def _open_file(func, *args, **kwargs): # Note that since we have used @decorator, *args, and **kwargs have # already been resolved to match the function signature of func. This # means default values have been propagated. For example, the function # func(x, y, a=1, b=2, **kwargs) if called as func(0,1,b=5,c=10) would # have args=(0,1,1,5) and kwargs={'c':10}. # First we parse the arguments of the decorator. The path_arg could # be an positional argument or a keyword argument. Even if it is try: # path_arg is a required positional argument # This works precisely because we are using @decorator path = args[path_arg] except TypeError: # path_arg is a keyword argument. It is "required" in the sense # that it must exist, according to the decorator specification, # It can exist in `kwargs` by a developer specified default value # or it could have been explicitly set by the user. try: path = kwargs[path_arg] except KeyError: # Could not find the keyword. Thus, no default was specified # in the function signature and the user did not provide it. msg = 'Missing required keyword argument: {0}' raise nx.NetworkXError(msg.format(path_arg)) else: is_kwarg = True except IndexError: # A "required" argument was missing. This can only happen if # the decorator of the function was incorrectly specified. # So this probably is not a user error, but a developer error. msg = "path_arg of open_file decorator is incorrect" raise nx.NetworkXError(msg) else: is_kwarg = False # Now we have the path_arg. 
There are two types of input to consider: # 1) string representing a path that should be opened # 2) an already opened file object if is_string_like(path): ext = splitext(path)[1] fobj = _dispatch_dict[ext](path, mode=mode) close_fobj = True elif hasattr(path, 'read'): # path is already a file-like object fobj = path close_fobj = False else: # could be None, in which case the algorithm will deal with it fobj = path close_fobj = False # Insert file object into args or kwargs. if is_kwarg: new_args = args kwargs[path_arg] = fobj else: # args is a tuple, so we must convert to list before modifying it. new_args = list(args) new_args[path_arg] = fobj # Finally, we call the original function, making sure to close the fobj. try: result = func(*new_args, **kwargs) finally: if close_fobj: fobj.close() return result return _open_file networkx-1.8.1/networkx/utils/random_sequence.py0000664000175000017500000001423312177456333022043 0ustar aricaric00000000000000""" Utilities for generating random numbers, random sequences, and random selections. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import random import sys import networkx as nx __author__ = '\n'.join(['Aric Hagberg (hagberg@lanl.gov)', 'Dan Schult(dschult@colgate.edu)', 'Ben Edwards(bedwards@cs.unm.edu)']) def create_degree_sequence(n, sfunction=None, max_tries=50, **kwds): """ Attempt to create a valid degree sequence of length n using specified function sfunction(n,**kwds). Parameters ---------- n : int Length of degree sequence = number of nodes sfunction: function Function which returns a list of n real or integer values. Called as "sfunction(n,**kwds)". max_tries: int Max number of attempts at creating valid degree sequence. Notes ----- Repeatedly create a degree sequence by calling sfunction(n,**kwds) until achieving a valid degree sequence. If unsuccessful after max_tries attempts, raise an exception. For examples of sfunctions that return sequences of random numbers, see networkx.Utils. Examples -------- >>> from networkx.utils import uniform_sequence, create_degree_sequence >>> seq=create_degree_sequence(10,uniform_sequence) """ tries=0 max_deg=n while tries < max_tries: trialseq=sfunction(n,**kwds) # round to integer values in the range [0,max_deg] seq=[min(max_deg, max( int(round(s)),0 )) for s in trialseq] # if graphical return, else throw away and try again if nx.is_valid_degree_sequence(seq): return seq tries+=1 raise nx.NetworkXError(\ "Exceeded max (%d) attempts at a valid sequence."%max_tries) # The same helpers for choosing random sequences from distributions # uses Python's random module # http://www.python.org/doc/current/lib/module-random.html def pareto_sequence(n,exponent=1.0): """ Return sample sequence of length n from a Pareto distribution. """ return [random.paretovariate(exponent) for i in range(n)] def powerlaw_sequence(n,exponent=2.0): """ Return sample sequence of length n from a power law distribution. """ return [random.paretovariate(exponent-1) for i in range(n)] def zipf_rv(alpha, xmin=1, seed=None): r"""Return a random value chosen from the Zipf distribution. The return value is an integer drawn from the probability distribution ::math:: p(x)=\frac{x^{-\alpha}}{\zeta(\alpha,x_{min})}, where `\zeta(\alpha,x_{min})` is the Hurwitz zeta function. 
Parameters ---------- alpha : float Exponent value of the distribution xmin : int Minimum value seed : int Seed value for random number generator Returns ------- x : int Random value from Zipf distribution Raises ------ ValueError: If xmin < 1 or If alpha <= 1 Notes ----- The rejection algorithm generates random values for a the power-law distribution in uniformly bounded expected time dependent on parameters. See [1] for details on its operation. Examples -------- >>> nx.zipf_rv(alpha=2, xmin=3, seed=42) # doctest: +SKIP References ---------- ..[1] Luc Devroye, Non-Uniform Random Variate Generation, Springer-Verlag, New York, 1986. """ if xmin < 1: raise ValueError("xmin < 1") if alpha <= 1: raise ValueError("a <= 1.0") if not seed is None: random.seed(seed) a1 = alpha - 1.0 b = 2**a1 while True: u = 1.0 - random.random() # u in (0,1] v = random.random() # v in [0,1) x = int(xmin*u**-(1.0/a1)) t = (1.0+(1.0/x))**a1 if v*x*(t-1.0)/(b-1.0) <= t/b: break return x def zipf_sequence(n, alpha=2.0, xmin=1): """Return a sample sequence of length n from a Zipf distribution with exponent parameter alpha and minimum value xmin. See Also -------- zipf_rv """ return [ zipf_rv(alpha,xmin) for _ in range(n)] def uniform_sequence(n): """ Return sample sequence of length n from a uniform distribution. """ return [ random.uniform(0,n) for i in range(n)] def cumulative_distribution(distribution): """Return normalized cumulative distribution from discrete distribution.""" cdf=[] cdf.append(0.0) psum=float(sum(distribution)) for i in range(0,len(distribution)): cdf.append(cdf[i]+distribution[i]/psum) return cdf def discrete_sequence(n, distribution=None, cdistribution=None): """ Return sample sequence of length n from a given discrete distribution or discrete cumulative distribution. One of the following must be specified. distribution = histogram of values, will be normalized cdistribution = normalized discrete cumulative distribution """ import bisect if cdistribution is not None: cdf=cdistribution elif distribution is not None: cdf=cumulative_distribution(distribution) else: raise nx.NetworkXError( "discrete_sequence: distribution or cdistribution missing") # get a uniform random number inputseq=[random.random() for i in range(n)] # choose from CDF seq=[bisect.bisect_left(cdf,s)-1 for s in inputseq] return seq def random_weighted_sample(mapping, k): """Return k items without replacement from a weighted sample. The input is a dictionary of items with weights as values. """ if k > len(mapping): raise ValueError("sample larger than population") sample = set() while len(sample) < k: sample.add(weighted_choice(mapping)) return list(sample) def weighted_choice(mapping): """Return a single element from a weighted sample. The input is a dictionary of items with weights as values. """ # use roulette method rnd = random.random() * sum(mapping.values()) for k, w in mapping.items(): rnd -= w if rnd < 0: return k networkx-1.8.1/networkx/utils/__init__.py0000664000175000017500000000030012177456333020420 0ustar aricaric00000000000000from networkx.utils.misc import * from networkx.utils.decorators import * from networkx.utils.random_sequence import * from networkx.utils.union_find import * from networkx.utils.rcm import * networkx-1.8.1/networkx/utils/rcm.py0000664000175000017500000001072512177456333017456 0ustar aricaric00000000000000""" Cuthill-McKee ordering of graph nodes to produce sparse matrices """ # Copyright (C) 2011 by # Aric Hagberg # All rights reserved. # BSD license. 
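# --- Standalone usage sketch (hedged; not part of rcm.py) -------------------
# The module below generates (reverse) Cuthill-McKee node orderings, whose
# point is to cluster the nonzeros of a sparse adjacency matrix near the
# diagonal.  This sketch compares matrix bandwidth before and after the
# reordering.  It assumes NumPy is installed; `bandwidth` is a local helper
# written here for illustration only, not a NetworkX function.
import numpy as np
import networkx as nx
from networkx.utils import reverse_cuthill_mckee_ordering

def bandwidth(A):
    # maximum |i - j| over the nonzero entries of a dense matrix
    i, j = np.nonzero(np.asarray(A))
    return int(np.abs(i - j).max()) if len(i) else 0

G = nx.grid_2d_graph(4, 4)                      # any sparse-ish graph will do
rcm = list(reverse_cuthill_mckee_ordering(G))   # permutation of the nodes
print(bandwidth(nx.to_numpy_matrix(G)))                 # original node order
print(bandwidth(nx.to_numpy_matrix(G, nodelist=rcm)))   # usually much smaller
# -----------------------------------------------------------------------------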
from operator import itemgetter import networkx as nx __author__ = """\n""".join(['Aric Hagberg ']) __all__ = ['cuthill_mckee_ordering', 'reverse_cuthill_mckee_ordering'] def cuthill_mckee_ordering(G, start=None): """Generate an ordering (permutation) of the graph nodes to make a sparse matrix. Uses the Cuthill-McKee heuristic (based on breadth-first search) [1]_. Parameters ---------- G : graph A NetworkX graph start : node, optional Start algorithm and specified node. The node should be on the periphery of the graph for best results. Returns ------- nodes : generator Generator of nodes in Cuthill-McKee ordering. Examples -------- >>> from networkx.utils import cuthill_mckee_ordering >>> G = nx.path_graph(4) >>> rcm = list(cuthill_mckee_ordering(G)) >>> A = nx.adjacency_matrix(G, nodelist=rcm) # doctest: +SKIP See Also -------- reverse_cuthill_mckee_ordering Notes ----- The optimal solution the the bandwidth reduction is NP-complete [2]_. References ---------- .. [1] E. Cuthill and J. McKee. Reducing the bandwidth of sparse symmetric matrices, In Proc. 24th Nat. Conf. ACM, pages 157-172, 1969. http://doi.acm.org/10.1145/800195.805928 .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual. Springer-Verlag New York, Inc., New York, NY, USA. """ for c in nx.connected_components(G): for n in connected_cuthill_mckee_ordering(G.subgraph(c), start): yield n def reverse_cuthill_mckee_ordering(G, start=None): """Generate an ordering (permutation) of the graph nodes to make a sparse matrix. Uses the reverse Cuthill-McKee heuristic (based on breadth-first search) [1]_. Parameters ---------- G : graph A NetworkX graph start : node, optional Start algorithm and specified node. The node should be on the periphery of the graph for best results. Returns ------- nodes : generator Generator of nodes in reverse Cuthill-McKee ordering. Examples -------- >>> from networkx.utils import reverse_cuthill_mckee_ordering >>> G = nx.path_graph(4) >>> rcm = list(reverse_cuthill_mckee_ordering(G)) >>> A = nx.adjacency_matrix(G, nodelist=rcm) # doctest: +SKIP See Also -------- cuthill_mckee_ordering Notes ----- The optimal solution the the bandwidth reduction is NP-complete [2]_. References ---------- .. [1] E. Cuthill and J. McKee. Reducing the bandwidth of sparse symmetric matrices, In Proc. 24th Nat. Conf. ACM, pages 157-72, 1969. http://doi.acm.org/10.1145/800195.805928 .. [2] Steven S. Skiena. 1997. The Algorithm Design Manual. Springer-Verlag New York, Inc., New York, NY, USA. 
""" return reversed(list(cuthill_mckee_ordering(G, start=start))) def connected_cuthill_mckee_ordering(G, start=None): # the cuthill mckee algorithm for connected graphs if start is None: (_, start) = find_pseudo_peripheral_node_pair(G) yield start visited = set([start]) stack = [(start, iter(G[start]))] while stack: parent,children = stack[0] if parent not in visited: yield parent try: child = next(children) if child not in visited: yield child visited.add(child) # add children to stack, sorted by degree (lowest first) nd = sorted(G.degree(G[child]).items(), key=itemgetter(1)) children = (n for n,d in nd) stack.append((child,children)) except StopIteration: stack.pop(0) def find_pseudo_peripheral_node_pair(G, start=None): # helper for cuthill-mckee to find a "pseudo peripheral pair" # to use as good starting node if start is None: u = next(G.nodes_iter()) else: u = start lp = 0 v = u while True: spl = nx.shortest_path_length(G, v) l = max(spl.values()) if l <= lp: break lp = l farthest = [n for n,dist in spl.items() if dist==l] v, deg = sorted(G.degree(farthest).items(), key=itemgetter(1))[0] return u, v networkx-1.8.1/networkx/utils/union_find.py0000664000175000017500000000467312177456333021032 0ustar aricaric00000000000000""" Union-find data structure. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx class UnionFind: """Union-find data structure. Each unionFind instance X maintains a family of disjoint sets of hashable objects, supporting the following two methods: - X[item] returns a name for the set containing the given item. Each set is named by an arbitrarily-chosen one of its members; as long as the set remains unchanged it will keep the same name. If the item is not yet part of a set in X, a new singleton set is created for it. - X.union(item1, item2, ...) merges the sets containing each item into a single larger set. If any item is not yet part of a set in X, it is added to X as one of the members of the merged set. Union-find data structure. Based on Josiah Carlson's code, http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/215912 with significant additional changes by D. Eppstein. 
http://www.ics.uci.edu/~eppstein/PADS/UnionFind.py """ def __init__(self): """Create a new empty union-find structure.""" self.weights = {} self.parents = {} def __getitem__(self, object): """Find and return the name of the set containing the object.""" # check for previously unknown object if object not in self.parents: self.parents[object] = object self.weights[object] = 1 return object # find path of objects leading to the root path = [object] root = self.parents[object] while root != path[-1]: path.append(root) root = self.parents[root] # compress the path and return for ancestor in path: self.parents[ancestor] = root return root def __iter__(self): """Iterate through all items ever found or unioned by this structure.""" return iter(self.parents) def union(self, *objects): """Find the sets containing the objects and merge them all.""" roots = [self[x] for x in objects] heaviest = max([(self.weights[r],r) for r in roots])[1] for r in roots: if r != heaviest: self.weights[heaviest] += self.weights[r] self.parents[r] = heaviest networkx-1.8.1/networkx/utils/tests/0000775000175000017500000000000012177457361017462 5ustar aricaric00000000000000networkx-1.8.1/networkx/utils/tests/test_decorators.py0000664000175000017500000001035512177456333023242 0ustar aricaric00000000000000import tempfile import os from nose.tools import * import networkx as nx from networkx.utils.decorators import open_file,require,not_implemented_for def test_not_implemented_decorator(): @not_implemented_for('directed') def test1(G): pass test1(nx.Graph()) @raises(KeyError) def test_not_implemented_decorator_key(): @not_implemented_for('foo') def test1(G): pass test1(nx.Graph()) @raises(nx.NetworkXNotImplemented) def test_not_implemented_decorator_raise(): @not_implemented_for('graph') def test1(G): pass test1(nx.Graph()) def test_require_decorator1(): @require('os','sys') def test1(): import os import sys test1() def test_require_decorator2(): @require('blahhh') def test2(): import blahhh assert_raises(nx.NetworkXError, test2) class TestOpenFileDecorator(object): def setUp(self): self.text = ['Blah... 
', 'BLAH ', 'BLAH!!!!'] self.fobj = tempfile.NamedTemporaryFile('wb+', delete=False) self.name = self.fobj.name def write(self, path): for text in self.text: path.write(text.encode('ascii')) @open_file(1, 'r') def read(self, path): return path.readlines()[0] @staticmethod @open_file(0, 'wb') def writer_arg0(path): path.write('demo'.encode('ascii')) @open_file(1, 'wb+') def writer_arg1(self, path): self.write(path) @open_file(2, 'wb') def writer_arg2default(self, x, path=None): if path is None: fh = tempfile.NamedTemporaryFile('wb+', delete=False) close_fh = True else: fh = path close_fh = False try: self.write(fh) finally: if close_fh: fh.close() @open_file(4, 'wb') def writer_arg4default(self, x, y, other='hello', path=None, **kwargs): if path is None: fh = tempfile.NamedTemporaryFile('wb+', delete=False) close_fh = True else: fh = path close_fh = False try: self.write(fh) finally: if close_fh: fh.close() @open_file('path', 'wb') def writer_kwarg(self, **kwargs): path = kwargs.get('path', None) if path is None: fh = tempfile.NamedTemporaryFile('wb+', delete=False) close_fh = True else: fh = path close_fh = False try: self.write(fh) finally: if close_fh: fh.close() def test_writer_arg0_str(self): self.writer_arg0(self.name) def test_writer_arg0_fobj(self): self.writer_arg0(self.fobj) def test_writer_arg1_str(self): self.writer_arg1(self.name) assert_equal( self.read(self.name), ''.join(self.text) ) def test_writer_arg1_fobj(self): self.writer_arg1(self.fobj) assert_false(self.fobj.closed) self.fobj.close() assert_equal( self.read(self.name), ''.join(self.text) ) def test_writer_arg2default_str(self): self.writer_arg2default(0, path=None) self.writer_arg2default(0, path=self.name) assert_equal( self.read(self.name), ''.join(self.text) ) def test_writer_arg2default_fobj(self): self.writer_arg2default(0, path=self.fobj) assert_false(self.fobj.closed) self.fobj.close() assert_equal( self.read(self.name), ''.join(self.text) ) def test_writer_arg2default_fobj(self): self.writer_arg2default(0, path=None) def test_writer_arg4default_fobj(self): self.writer_arg4default(0, 1, dog='dog', other='other2') self.writer_arg4default(0, 1, dog='dog', other='other2', path=self.name) assert_equal( self.read(self.name), ''.join(self.text) ) def test_writer_kwarg_str(self): self.writer_kwarg(path=self.name) assert_equal( self.read(self.name), ''.join(self.text) ) def test_writer_kwarg_fobj(self): self.writer_kwarg(path=self.fobj) self.fobj.close() assert_equal( self.read(self.name), ''.join(self.text) ) def test_writer_kwarg_fobj(self): self.writer_kwarg(path=None) def tearDown(self): self.fobj.close() networkx-1.8.1/networkx/utils/tests/test_random_sequence.py0000664000175000017500000000175512177456333024251 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from networkx.utils import uniform_sequence,powerlaw_sequence,\ create_degree_sequence,zipf_rv,zipf_sequence,random_weighted_sample,\ weighted_choice import networkx.utils def test_degree_sequences(): seq=create_degree_sequence(10,uniform_sequence) assert_equal(len(seq), 10) seq=create_degree_sequence(10,powerlaw_sequence) assert_equal(len(seq), 10) def test_zipf_rv(): r = zipf_rv(2.3) assert_true(type(r),int) assert_raises(ValueError,zipf_rv,0.5) assert_raises(ValueError,zipf_rv,2,xmin=0) def test_zipf_sequence(): s = zipf_sequence(10) assert_equal(len(s),10) def test_random_weighted_sample(): mapping={'a':10,'b':20} s = random_weighted_sample(mapping,2) assert_equal(sorted(s),sorted(mapping.keys())) 
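    # asking for more items than the weighted population contains should
    # raise ValueError (see random_weighted_sample above)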
assert_raises(ValueError,random_weighted_sample,mapping,3) def test_random_weighted_choice(): mapping={'a':10,'b':0} c = weighted_choice(mapping) assert_equal(c,'a') networkx-1.8.1/networkx/utils/tests/test.txt0000664000175000017500000000002612177456333021176 0ustar aricaric00000000000000Blah... BLAH BLAH!!!! networkx-1.8.1/networkx/utils/tests/test_rcm.py0000664000175000017500000000113012177456333021645 0ustar aricaric00000000000000from nose.tools import * from networkx.utils import reverse_cuthill_mckee_ordering import networkx as nx def test_reverse_cuthill_mckee(): # example graph from # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp G = nx.Graph([(0,3),(0,5),(1,2),(1,4),(1,6),(1,9),(2,3), (2,4),(3,5),(3,8),(4,6),(5,6),(5,7),(6,7)]) rcm = list(reverse_cuthill_mckee_ordering(G,start=0)) assert_equal(rcm,[9, 1, 4, 6, 7, 2, 8, 5, 3, 0]) rcm = list(reverse_cuthill_mckee_ordering(G)) assert_equal(rcm,[0, 8, 5, 7, 3, 6, 4, 2, 1, 9]) networkx-1.8.1/networkx/utils/tests/test_misc.py0000664000175000017500000000437512177456333022035 0ustar aricaric00000000000000from nose.tools import * from nose import SkipTest import networkx as nx from networkx.utils import * def test_is_string_like(): assert_true(is_string_like("aaaa")) assert_false(is_string_like(None)) assert_false(is_string_like(123)) def test_iterable(): assert_false(iterable(None)) assert_false(iterable(10)) assert_true(iterable([1,2,3])) assert_true(iterable((1,2,3))) assert_true(iterable({1:"A",2:"X"})) assert_true(iterable("ABC")) def test_graph_iterable(): K=nx.complete_graph(10) assert_true(iterable(K)) assert_true(iterable(K.nodes_iter())) assert_true(iterable(K.edges_iter())) def test_is_list_of_ints(): assert_true(is_list_of_ints([1,2,3,42])) assert_false(is_list_of_ints([1,2,3,"kermit"])) def test_random_number_distribution(): # smoke test only z=uniform_sequence(20) z=powerlaw_sequence(20,exponent=2.5) z=pareto_sequence(20,exponent=1.5) z=discrete_sequence(20,distribution=[0,0,0,0,1,1,1,1,2,2,3]) class TestNumpyArray(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global numpy global assert_equal global assert_almost_equal try: import numpy from numpy.testing import assert_equal,assert_almost_equal except ImportError: raise SkipTest('NumPy not available.') def test_dict_to_numpy_array1(self): d = {'a':1,'b':2} a = dict_to_numpy_array1(d) assert_equal(a, numpy.array([1,2])) a = dict_to_numpy_array1(d, mapping = {'b':0,'a':1}) assert_equal(a, numpy.array([2,1])) def test_dict_to_numpy_array2(self): d = {'a': {'a':1,'b':2}, 'b': {'a':10,'b':20}} a = dict_to_numpy_array(d) assert_equal(a, numpy.array([[1,2],[10,20]])) a = dict_to_numpy_array2(d, mapping = {'b':0,'a':1}) assert_equal(a, numpy.array([[20,10],[2,1]])) def test_dict_to_numpy_array(self): d = {'a': {'a':1,'b':2}, 'b': {'a':10,'b':20}} a = dict_to_numpy_array(d) assert_equal(a, numpy.array([[1,2],[10,20]])) d = {'a':1,'b':2} a = dict_to_numpy_array1(d) assert_equal(a, numpy.array([1,2])) networkx-1.8.1/networkx/utils/misc.py0000664000175000017500000001006412177456333017624 0ustar aricaric00000000000000""" Miscellaneous Helpers for NetworkX. These are not imported into the base networkx namespace but can be accessed, for example, as >>> import networkx >>> networkx.utils.is_string_like('spam') True """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
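# --- Standalone usage sketch (hedged; not part of misc.py) ------------------
# A quick tour of a few of the helpers defined below.  All of them are
# re-exported through networkx.utils (see utils/__init__.py above);
# dict_to_numpy_array additionally needs NumPy, and the exact float
# formatting of the printed array depends on the NumPy version.
from networkx.utils import flatten, cumulative_sum, dict_to_numpy_array

print(flatten([1, [2, (3, 4)], 5]))        # [1, 2, 3, 4, 5]
print(list(cumulative_sum([1, 2, 3, 4])))  # [1, 3, 6, 10]

# dict_to_numpy_array dispatches on nesting depth: a dict of dicts becomes a
# 2d array, a flat dict of numbers a 1d array.  An explicit `mapping` pins
# each key to a fixed row/column index instead of relying on set ordering.
d = {'a': {'a': 1, 'b': 2}, 'b': {'a': 10, 'b': 20}}
print(dict_to_numpy_array(d, mapping={'a': 0, 'b': 1}))
# [[  1.   2.]
#  [ 10.  20.]]
# -----------------------------------------------------------------------------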
import sys import subprocess import uuid import networkx as nx from networkx.external.decorator import decorator __author__ = '\n'.join(['Aric Hagberg (hagberg@lanl.gov)', 'Dan Schult(dschult@colgate.edu)', 'Ben Edwards(bedwards@cs.unm.edu)']) ### some cookbook stuff # used in deciding whether something is a bunch of nodes, edges, etc. # see G.add_nodes and others in Graph Class in networkx/base.py def is_string_like(obj): # from John Hunter, types-free version """Check if obj is string.""" try: obj + '' except (TypeError, ValueError): return False return True def iterable(obj): """ Return True if obj is iterable with a well-defined len().""" if hasattr(obj,"__iter__"): return True try: len(obj) except: return False return True def flatten(obj, result=None): """ Return flattened version of (possibly nested) iterable object. """ if not iterable(obj) or is_string_like(obj): return obj if result is None: result = [] for item in obj: if not iterable(item) or is_string_like(item): result.append(item) else: flatten(item, result) return obj.__class__(result) def is_list_of_ints( intlist ): """ Return True if list is a list of ints. """ if not isinstance(intlist,list): return False for i in intlist: if not isinstance(i,int): return False return True def make_str(t): """Return the string representation of t.""" if is_string_like(t): return t return str(t) def cumulative_sum(numbers): """Yield cumulative sum of numbers. >>> import networkx.utils as utils >>> list(utils.cumulative_sum([1,2,3,4])) [1, 3, 6, 10] """ csum = 0 for n in numbers: csum += n yield csum def generate_unique_node(): """ Generate a unique node label.""" return str(uuid.uuid1()) def default_opener(filename): """Opens `filename` using system's default program. Parameters ---------- filename : str The path of the file to be opened. """ cmds = {'darwin': ['open'], 'linux2': ['xdg-open'], 'win32': ['cmd.exe', '/C', 'start', '']} cmd = cmds[sys.platform] + [filename] subprocess.call(cmd) def dict_to_numpy_array(d,mapping=None): """Convert a dictionary of dictionaries to a numpy array with optional mapping.""" try: return dict_to_numpy_array2(d, mapping) except AttributeError: return dict_to_numpy_array1(d,mapping) def dict_to_numpy_array2(d,mapping=None): """Convert a dictionary of dictionaries to a 2d numpy array with optional mapping.""" try: import numpy except ImportError: raise ImportError( "dict_to_numpy_array requires numpy : http://scipy.org/ ") if mapping is None: s=set(d.keys()) for k,v in d.items(): s.update(v.keys()) mapping=dict(zip(s,range(len(s)))) n=len(mapping) a = numpy.zeros((n, n)) for k1, row in d.items(): for k2, value in row.items(): i=mapping[k1] j=mapping[k2] a[i,j] = value return a def dict_to_numpy_array1(d,mapping=None): """Convert a dictionary of numbers to a 1d numpy array with optional mapping.""" try: import numpy except ImportError: raise ImportError( "dict_to_numpy_array requires numpy : http://scipy.org/ ") if mapping is None: s = set(d.keys()) mapping = dict(zip(s,range(len(s)))) n = len(mapping) a = numpy.zeros(n) for k1, value in d.items(): i = mapping[k1] a[i] = value return a networkx-1.8.1/networkx/readwrite/0000775000175000017500000000000012177457361017146 5ustar aricaric00000000000000networkx-1.8.1/networkx/readwrite/gexf.py0000664000175000017500000010056612177456333020457 0ustar aricaric00000000000000""" **** GEXF **** Read and write graphs in GEXF format. GEXF (Graph Exchange XML Format) is a language for describing complex network structures, their associated data and dynamics. 
This implementation does not support mixed graphs (directed and undirected edges together). Format ------ GEXF is an XML format. See http://gexf.net/format/schema.html for the specification and http://gexf.net/format/basic.html for examples. """ # Copyright (C) 2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # Based on GraphML NetworkX GraphML reader __author__ = """\n""".join(['Aric Hagberg ']) __all__ = ['write_gexf', 'read_gexf', 'relabel_gexf_graph', 'generate_gexf'] import itertools import networkx as nx from networkx.utils import open_file, make_str try: from xml.etree.cElementTree import Element, ElementTree, tostring except ImportError: try: from xml.etree.ElementTree import Element, ElementTree, tostring except ImportError: pass @open_file(1,mode='wb') def write_gexf(G, path, encoding='utf-8',prettyprint=True,version='1.1draft'): """Write G in GEXF format to path. "GEXF (Graph Exchange XML Format) is a language for describing complex networks structures, their associated data and dynamics" [1]_. Parameters ---------- G : graph A NetworkX graph path : file or string File or file name to write. File names ending in .gz or .bz2 will be compressed. encoding : string (optional) Encoding for text data. prettyprint : bool (optional) If True use line breaks and indenting in output XML. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_gexf(G, "test.gexf") Notes ----- This implementation does not support mixed graphs (directed and undirected edges together). The node id attribute is set to be the string of the node label. If you want to specify an id use set it as node data, e.g. node['a']['id']=1 to set the id of node 'a' to 1. References ---------- .. [1] GEXF graph format, http://gexf.net/format/ """ writer = GEXFWriter(encoding=encoding,prettyprint=prettyprint, version=version) writer.add_graph(G) writer.write(path) def generate_gexf(G, encoding='utf-8',prettyprint=True,version='1.1draft'): """Generate lines of GEXF format representation of G" "GEXF (Graph Exchange XML Format) is a language for describing complex networks structures, their associated data and dynamics" [1]_. Parameters ---------- G : graph A NetworkX graph encoding : string (optional) Encoding for text data. prettyprint : bool (optional) If True use line breaks and indenting in output XML. Examples -------- >>> G=nx.path_graph(4) >>> linefeed=chr(10) # linefeed=\n >>> s=linefeed.join(nx.generate_gexf(G)) # doctest: +SKIP >>> for line in nx.generate_gexf(G): # doctest: +SKIP ... print line Notes ----- This implementation does not support mixed graphs (directed and undirected edges together). The node id attribute is set to be the string of the node label. If you want to specify an id use set it as node data, e.g. node['a']['id']=1 to set the id of node 'a' to 1. References ---------- .. [1] GEXF graph format, http://gexf.net/format/ """ writer = GEXFWriter(encoding=encoding,prettyprint=prettyprint, version=version) writer.add_graph(G) for line in str(writer).splitlines(): yield line @open_file(0,mode='rb') def read_gexf(path,node_type=None,relabel=False,version='1.1draft'): """Read graph in GEXF format from path. "GEXF (Graph Exchange XML Format) is a language for describing complex networks structures, their associated data and dynamics" [1]_. Parameters ---------- path : file or string File or file name to write. File names ending in .gz or .bz2 will be compressed. node_type: Python type (default: None) Convert node ids to this type if not None. 
relabel : bool (default: False) If True relabel the nodes to use the GEXF node "label" attribute instead of the node "id" attribute as the NetworkX node label. Returns ------- graph: NetworkX graph If no parallel edges are found a Graph or DiGraph is returned. Otherwise a MultiGraph or MultiDiGraph is returned. Notes ----- This implementation does not support mixed graphs (directed and undirected edges together). References ---------- .. [1] GEXF graph format, http://gexf.net/format/ """ reader = GEXFReader(node_type=node_type,version=version) if relabel: G=relabel_gexf_graph(reader(path)) else: G=reader(path) return G class GEXF(object): # global register_namespace versions={} d={'NS_GEXF':"http://www.gexf.net/1.1draft", 'NS_VIZ':"http://www.gexf.net/1.1draft/viz", 'NS_XSI':"http://www.w3.org/2001/XMLSchema-instance", 'SCHEMALOCATION':' '.join(['http://www.gexf.net/1.1draft', 'http://www.gexf.net/1.1draft/gexf.xsd' ]), 'VERSION':'1.1' } versions['1.1draft']=d d={'NS_GEXF':"http://www.gexf.net/1.2draft", 'NS_VIZ':"http://www.gexf.net/1.2draft/viz", 'NS_XSI':"http://www.w3.org/2001/XMLSchema-instance", 'SCHEMALOCATION':' '.join(['http://www.gexf.net/1.2draft', 'http://www.gexf.net/1.2draft/gexf.xsd' ]), 'VERSION':'1.2' } versions['1.2draft']=d types=[(int,"integer"), (float,"float"), (float,"double"), (bool,"boolean"), (list,"string"), (dict,"string"), ] try: # Python 3.x blurb = chr(1245) # just to trigger the exception types.extend([ (str,"liststring"), (str,"anyURI"), (str,"string")]) except ValueError: # Python 2.6+ types.extend([ (str,"liststring"), (str,"anyURI"), (str,"string"), (unicode,"liststring"), (unicode,"anyURI"), (unicode,"string")]) xml_type = dict(types) python_type = dict(reversed(a) for a in types) convert_bool={'true':True,'false':False} # try: # register_namespace = ET.register_namespace # except AttributeError: # def register_namespace(prefix, uri): # ET._namespace_map[uri] = prefix def set_version(self,version): d=self.versions.get(version) if d is None: raise nx.NetworkXError('Unknown GEXF version %s'%version) self.NS_GEXF = d['NS_GEXF'] self.NS_VIZ = d['NS_VIZ'] self.NS_XSI = d['NS_XSI'] self.SCHEMALOCATION = d['NS_XSI'] self.VERSION=d['VERSION'] self.version=version # register_namespace('viz', d['NS_VIZ']) class GEXFWriter(GEXF): # class for writing GEXF format files # use write_gexf() function def __init__(self, graph=None, encoding="utf-8", mode='static',prettyprint=True, version='1.1draft'): try: import xml.etree.ElementTree except ImportError: raise ImportError('GEXF writer requires ' 'xml.elementtree.ElementTree') self.prettyprint=prettyprint self.mode=mode self.encoding = encoding self.set_version(version) self.xml = Element("gexf", {'xmlns':self.NS_GEXF, 'xmlns:xsi':self.NS_XSI, 'xmlns:viz':self.NS_VIZ, 'xsi:schemaLocation':self.SCHEMALOCATION, 'version':self.VERSION}) # counters for edge and attribute identifiers self.edge_id=itertools.count() self.attr_id=itertools.count() # default attributes are stored in dictionaries self.attr={} self.attr['node']={} self.attr['edge']={} self.attr['node']['dynamic']={} self.attr['node']['static']={} self.attr['edge']['dynamic']={} self.attr['edge']['static']={} if graph is not None: self.add_graph(graph) def __str__(self): if self.prettyprint: self.indent(self.xml) s=tostring(self.xml).decode(self.encoding) return s def add_graph(self, G): # Add a graph element to the XML if G.is_directed(): default='directed' else: default='undirected' graph_element = Element("graph",defaultedgetype=default,mode=self.mode) 
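        # At this point graph_element is just the bare <graph> element with
        # its defaultedgetype/mode attributes; add_nodes() and add_edges()
        # below fill in <nodes> and <edges>, and get_attr_id() lazily inserts
        # any <attributes> declarations at position 0, so the finished
        # document looks roughly like:
        #   <graph defaultedgetype="..." mode="static">
        #     <attributes class="node" mode="static">...</attributes>
        #     <nodes>...</nodes>
        #     <edges>...</edges>
        #   </graph>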
self.graph_element=graph_element self.add_nodes(G,graph_element) self.add_edges(G,graph_element) self.xml.append(graph_element) def add_nodes(self, G, graph_element): nodes_element = Element('nodes') for node,data in G.nodes_iter(data=True): node_data=data.copy() node_id = make_str(node_data.pop('id', node)) kw={'id':node_id} label = make_str(node_data.pop('label', node)) kw['label']=label try: pid=node_data.pop('pid') kw['pid'] = make_str(pid) except KeyError: pass # add node element with attributes node_element = Element("node", **kw) # add node element and attr subelements default=G.graph.get('node_default',{}) node_data=self.add_parents(node_element, node_data) if self.version=='1.1': node_data=self.add_slices(node_element, node_data) else: node_data=self.add_spells(node_element, node_data) node_data=self.add_viz(node_element,node_data) node_data=self.add_attributes("node", node_element, node_data, default) nodes_element.append(node_element) graph_element.append(nodes_element) def add_edges(self, G, graph_element): def edge_key_data(G): # helper function to unify multigraph and graph edge iterator if G.is_multigraph(): for u,v,key,data in G.edges_iter(data=True,keys=True): edge_data=data.copy() edge_data.update(key=key) edge_id=edge_data.pop('id',None) if edge_id is None: edge_id=next(self.edge_id) yield u,v,edge_id,edge_data else: for u,v,data in G.edges_iter(data=True): edge_data=data.copy() edge_id=edge_data.pop('id',None) if edge_id is None: edge_id=next(self.edge_id) yield u,v,edge_id,edge_data edges_element = Element('edges') for u,v,key,edge_data in edge_key_data(G): kw={'id':make_str(key)} try: edge_weight=edge_data.pop('weight') kw['weight']=make_str(edge_weight) except KeyError: pass try: edge_type=edge_data.pop('type') kw['type']=make_str(edge_type) except KeyError: pass source_id = make_str(G.node[u].get('id', u)) target_id = make_str(G.node[v].get('id', v)) edge_element = Element("edge", source=source_id,target=target_id, **kw) default=G.graph.get('edge_default',{}) edge_data=self.add_viz(edge_element,edge_data) edge_data=self.add_attributes("edge", edge_element, edge_data, default) edges_element.append(edge_element) graph_element.append(edges_element) def add_attributes(self, node_or_edge, xml_obj, data, default): # Add attrvalues to node or edge attvalues=Element('attvalues') if len(data)==0: return data if 'start' in data or 'end' in data: mode='dynamic' else: mode='static' for k,v in data.items(): # rename generic multigraph key to avoid any name conflict if k == 'key': k='networkx_key' attr_id = self.get_attr_id(make_str(k), self.xml_type[type(v)], node_or_edge, default, mode) if type(v)==list: # dynamic data for val,start,end in v: e=Element("attvalue") e.attrib['for']=attr_id e.attrib['value']=make_str(val) if start is not None: e.attrib['start']=make_str(start) if end is not None: e.attrib['end']=make_str(end) attvalues.append(e) else: # static data e=Element("attvalue") e.attrib['for']=attr_id e.attrib['value']=make_str(v) attvalues.append(e) xml_obj.append(attvalues) return data def get_attr_id(self, title, attr_type, edge_or_node, default, mode): # find the id of the attribute or generate a new id try: return self.attr[edge_or_node][mode][title] except KeyError: # generate new id new_id=str(next(self.attr_id)) self.attr[edge_or_node][mode][title] = new_id attr_kwargs = {"id":new_id, "title":title, "type":attr_type} attribute=Element("attribute",**attr_kwargs) # add subelement for data default value if present default_title=default.get(title) if default_title is 
not None: default_element=Element("default") default_element.text=make_str(default_title) attribute.append(default_element) # new insert it into the XML attributes_element=None for a in self.graph_element.findall("attributes"): # find existing attributes element by class and mode a_class=a.get('class') a_mode=a.get('mode','static') # default mode is static if a_class==edge_or_node and a_mode==mode: attributes_element=a if attributes_element is None: # create new attributes element attr_kwargs = {"mode":mode,"class":edge_or_node} attributes_element=Element('attributes', **attr_kwargs) self.graph_element.insert(0,attributes_element) attributes_element.append(attribute) return new_id def add_viz(self,element,node_data): viz=node_data.pop('viz',False) if viz: color=viz.get('color') if color is not None: if self.VERSION=='1.1': e=Element("{%s}color"%self.NS_VIZ, r=str(color.get('r')), g=str(color.get('g')), b=str(color.get('b')), ) else: e=Element("{%s}color"%self.NS_VIZ, r=str(color.get('r')), g=str(color.get('g')), b=str(color.get('b')), a=str(color.get('a')), ) element.append(e) size=viz.get('size') if size is not None: e=Element("{%s}size"%self.NS_VIZ,value=str(size)) element.append(e) thickness=viz.get('thickness') if thickness is not None: e=Element("{%s}thickness"%self.NS_VIZ,value=str(thickness)) element.append(e) shape=viz.get('shape') if shape is not None: if shape.startswith('http'): e=Element("{%s}shape"%self.NS_VIZ, value='image',uri=str(shape)) else: e=Element("{%s}shape"%self.NS_VIZ,value=str(shape)) element.append(e) position=viz.get('position') if position is not None: e=Element("{%s}position"%self.NS_VIZ, x=str(position.get('x')), y=str(position.get('y')), z=str(position.get('z')), ) element.append(e) return node_data def add_parents(self,node_element,node_data): parents=node_data.pop('parents',False) if parents: parents_element=Element('parents') for p in parents: e=Element('parent') e.attrib['for']=str(p) parents_element.append(e) node_element.append(parents_element) return node_data def add_slices(self,node_element,node_data): slices=node_data.pop('slices',False) if slices: slices_element=Element('slices') for start,end in slices: e=Element('slice',start=str(start),end=str(end)) slices_element.append(e) node_element.append(slices_element) return node_data def add_spells(self,node_element,node_data): spells=node_data.pop('spells',False) if spells: spells_element=Element('spells') for start,end in spells: e=Element('spell') if start is not None: e.attrib['start']=make_str(start) if end is not None: e.attrib['end']=make_str(end) spells_element.append(e) node_element.append(spells_element) return node_data def write(self, fh): # Serialize graph G in GEXF to the open fh if self.prettyprint: self.indent(self.xml) document = ElementTree(self.xml) header=''%self.encoding fh.write(header.encode(self.encoding)) document.write(fh, encoding=self.encoding) def indent(self, elem, level=0): # in-place prettyprint formatter i = "\n" + level*" " if len(elem): if not elem.text or not elem.text.strip(): elem.text = i + " " if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: self.indent(elem, level+1) if not elem.tail or not elem.tail.strip(): elem.tail = i else: if level and (not elem.tail or not elem.tail.strip()): elem.tail = i class GEXFReader(GEXF): # Class to read GEXF format files # use read_gexf() function def __init__(self, node_type=None,version='1.1draft'): try: import xml.etree.ElementTree except ImportError: raise ImportError('GEXF reader requires ' 
'xml.elementtree.ElementTree') self.node_type=node_type # assume simple graph and test for multigraph on read self.simple_graph=True self.set_version(version) def __call__(self, stream): self.xml = ElementTree(file=stream) g=self.xml.find("{%s}graph" % self.NS_GEXF) if g is not None: return self.make_graph(g) # try all the versions for version in self.versions: self.set_version(version) g=self.xml.find("{%s}graph" % self.NS_GEXF) if g is not None: return self.make_graph(g) raise nx.NetworkXError("No element in GEXF file") def make_graph(self, graph_xml): # mode is "static" or "dynamic" graph_mode = graph_xml.get("mode", "") self.dynamic=(graph_mode=='dynamic') # start with empty DiGraph or MultiDiGraph edgedefault = graph_xml.get("defaultedgetype", None) if edgedefault=='directed': G=nx.MultiDiGraph() else: G=nx.MultiGraph() # graph attributes graph_start=graph_xml.get('start') if graph_start is not None: G.graph['start']=graph_start graph_end=graph_xml.get('end') if graph_end is not None: G.graph['end']=graph_end # node and edge attributes attributes_elements=graph_xml.findall("{%s}attributes"%self.NS_GEXF) # dictionaries to hold attributes and attribute defaults node_attr={} node_default={} edge_attr={} edge_default={} for a in attributes_elements: attr_class = a.get("class") if attr_class=='node': na,nd = self.find_gexf_attributes(a) node_attr.update(na) node_default.update(nd) G.graph['node_default']=node_default elif attr_class=='edge': ea,ed = self.find_gexf_attributes(a) edge_attr.update(ea) edge_default.update(ed) G.graph['edge_default']=edge_default else: raise # unknown attribute class # Hack to handle Gephi0.7beta bug # add weight attribute ea={'weight':{'type': 'double', 'mode': 'static', 'title': 'weight'}} ed={} edge_attr.update(ea) edge_default.update(ed) G.graph['edge_default']=edge_default # add nodes nodes_element=graph_xml.find("{%s}nodes" % self.NS_GEXF) if nodes_element is not None: for node_xml in nodes_element.findall("{%s}node" % self.NS_GEXF): self.add_node(G, node_xml, node_attr) # add edges edges_element=graph_xml.find("{%s}edges" % self.NS_GEXF) if edges_element is not None: for edge_xml in edges_element.findall("{%s}edge" % self.NS_GEXF): self.add_edge(G, edge_xml, edge_attr) # switch to Graph or DiGraph if no parallel edges were found. 
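        # (self.simple_graph starts out True and is cleared in add_edge()
        # the first time a repeated source/target pair is encountered)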
if self.simple_graph: if G.is_directed(): G=nx.DiGraph(G) else: G=nx.Graph(G) return G def add_node(self, G, node_xml, node_attr, node_pid=None): # add a single node with attributes to the graph # get attributes and subattributues for node data = self.decode_attr_elements(node_attr, node_xml) data = self.add_parents(data, node_xml) # add any parents if self.version=='1.1': data = self.add_slices(data, node_xml) # add slices else: data = self.add_spells(data, node_xml) # add spells data = self.add_viz(data, node_xml) # add viz data = self.add_start_end(data, node_xml) # add start/end # find the node id and cast it to the appropriate type node_id = node_xml.get("id") if self.node_type is not None: node_id=self.node_type(node_id) # every node should have a label node_label = node_xml.get("label") data['label']=node_label # parent node id node_pid = node_xml.get("pid", node_pid) if node_pid is not None: data['pid']=node_pid # check for subnodes, recursive subnodes=node_xml.find("{%s}nodes" % self.NS_GEXF) if subnodes is not None: for node_xml in subnodes.findall("{%s}node" % self.NS_GEXF): self.add_node(G, node_xml, node_attr, node_pid=node_id) G.add_node(node_id, data) def add_start_end(self, data, xml): # start and end times node_start = xml.get("start") if node_start is not None: data['start']=node_start node_end = xml.get("end") if node_end is not None: data['end']=node_end return data def add_viz(self, data, node_xml): # add viz element for node viz={} color=node_xml.find("{%s}color"%self.NS_VIZ) if color is not None: if self.VERSION=='1.1': viz['color']={'r':int(color.get('r')), 'g':int(color.get('g')), 'b':int(color.get('b'))} else: viz['color']={'r':int(color.get('r')), 'g':int(color.get('g')), 'b':int(color.get('b')), 'a':float(color.get('a', 1)), } size=node_xml.find("{%s}size"%self.NS_VIZ) if size is not None: viz['size']=float(size.get('value')) thickness=node_xml.find("{%s}thickness"%self.NS_VIZ) if thickness is not None: viz['thickness']=float(thickness.get('value')) shape=node_xml.find("{%s}shape"%self.NS_VIZ) if shape is not None: viz['shape']=shape.get('shape') if viz['shape']=='image': viz['shape']=shape.get('uri') position=node_xml.find("{%s}position"%self.NS_VIZ) if position is not None: viz['position']={'x':float(position.get('x',0)), 'y':float(position.get('y',0)), 'z':float(position.get('z',0))} if len(viz)>0: data['viz']=viz return data def add_parents(self, data, node_xml): parents_element=node_xml.find("{%s}parents"%self.NS_GEXF) if parents_element is not None: data['parents']=[] for p in parents_element.findall("{%s}parent"%self.NS_GEXF): parent=p.get('for') data['parents'].append(parent) return data def add_slices(self, data, node_xml): slices_element=node_xml.find("{%s}slices"%self.NS_GEXF) if slices_element is not None: data['slices']=[] for s in slices_element.findall("{%s}slice"%self.NS_GEXF): start=s.get('start') end=s.get('end') data['slices'].append((start,end)) return data def add_spells(self, data, node_xml): spells_element=node_xml.find("{%s}spells"%self.NS_GEXF) if spells_element is not None: data['spells']=[] for s in spells_element.findall("{%s}spell"%self.NS_GEXF): start=s.get('start') end=s.get('end') data['spells'].append((start,end)) return data def add_edge(self, G, edge_element, edge_attr): # add an edge to the graph # raise error if we find mixed directed and undirected edges edge_direction = edge_element.get("type") if G.is_directed() and edge_direction=='undirected': raise nx.NetworkXError(\ "Undirected edge found in directed graph.") if (not 
G.is_directed()) and edge_direction=='directed': raise nx.NetworkXError(\ "Directed edge found in undirected graph.") # Get source and target and recast type if required source = edge_element.get("source") target = edge_element.get("target") if self.node_type is not None: source=self.node_type(source) target=self.node_type(target) data = self.decode_attr_elements(edge_attr, edge_element) data = self.add_start_end(data,edge_element) # GEXF stores edge ids as an attribute # NetworkX uses them as keys in multigraphs # if networkx_key is not specified as an attribute edge_id = edge_element.get("id") if edge_id is not None: data["id"] = edge_id # check if there is a 'multigraph_key' and use that as edge_id multigraph_key = data.pop('networkx_key',None) if multigraph_key is not None: edge_id=multigraph_key weight = edge_element.get('weight') if weight is not None: data['weight']=float(weight) edge_label = edge_element.get("label") if edge_label is not None: data['label']=edge_label if G.has_edge(source,target): # seen this edge before - this is a multigraph self.simple_graph=False G.add_edge(source, target, key=edge_id, **data) if edge_direction=='mutual': G.add_edge(target, source, key=edge_id, **data) def decode_attr_elements(self, gexf_keys, obj_xml): # Use the key information to decode the attr XML attr = {} # look for outer "" element attr_element=obj_xml.find("{%s}attvalues" % self.NS_GEXF) if attr_element is not None: # loop over elements for a in attr_element.findall("{%s}attvalue" % self.NS_GEXF): key = a.get('for') # for is required try: # should be in our gexf_keys dictionary title=gexf_keys[key]['title'] except KeyError: raise nx.NetworkXError("No attribute defined for=%s"%key) atype=gexf_keys[key]['type'] value=a.get('value') if atype=='boolean': value=self.convert_bool[value] else: value=self.python_type[atype](value) if gexf_keys[key]['mode']=='dynamic': # for dynamic graphs use list of three-tuples # [(value1,start1,end1), (value2,start2,end2), etc] start=a.get('start') end=a.get('end') if title in attr: attr[title].append((value,start,end)) else: attr[title]=[(value,start,end)] else: # for static graphs just assign the value attr[title] = value return attr def find_gexf_attributes(self, attributes_element): # Extract all the attributes and defaults attrs = {} defaults = {} mode=attributes_element.get('mode') for k in attributes_element.findall("{%s}attribute" % self.NS_GEXF): attr_id = k.get("id") title=k.get('title') atype=k.get('type') attrs[attr_id]={'title':title,'type':atype,'mode':mode} # check for the "default" subelement of key element and add default=k.find("{%s}default" % self.NS_GEXF) if default is not None: if atype=='boolean': value=self.convert_bool[default.text] else: value=self.python_type[atype](default.text) defaults[title]=value return attrs,defaults def relabel_gexf_graph(G): """Relabel graph using "label" node keyword for node label. Parameters ---------- G : graph A NetworkX graph read from GEXF data Returns ------- H : graph A NetworkX graph with relabed nodes Notes ----- This function relabels the nodes in a NetworkX graph with the "label" attribute. It also handles relabeling the specific GEXF node attributes "parents", and "pid". """ # build mapping of node labels, do some error checking try: mapping=[(u,G.node[u]['label']) for u in G] except KeyError: raise nx.NetworkXError('Failed to relabel nodes: ' 'missing node labels found. 
' 'Use relabel=False.') x,y=zip(*mapping) if len(set(y))!=len(G): raise nx.NetworkXError('Failed to relabel nodes: ' 'duplicate node labels found. ' 'Use relabel=False.') mapping=dict(mapping) H=nx.relabel_nodes(G,mapping) # relabel attributes for n in G: m=mapping[n] H.node[m]['id']=n H.node[m].pop('label') if 'pid' in H.node[m]: H.node[m]['pid']=mapping[G.node[n]['pid']] if 'parents' in H.node[m]: H.node[m]['parents']=[mapping[p] for p in G.node[n]['parents']] return H # fixture for nose tests def setup_module(module): from nose import SkipTest try: import xml.etree.cElementTree except: raise SkipTest("xml.etree.cElementTree not available") # fixture for nose tests def teardown_module(module): import os try: os.unlink('test.gexf') except: pass networkx-1.8.1/networkx/readwrite/p2g.py0000664000175000017500000000631712177456333020215 0ustar aricaric00000000000000""" This module provides the following: read and write of p2g format used in metabolic pathway studies. See http://www.cs.purdue.edu/homes/koyuturk/pathway/ for a description. The summary is included here: A file that describes a uniquely labeled graph (with extension ".gr") format looks like the following: name 3 4 a 1 2 b c 0 2 "name" is simply a description of what the graph corresponds to. The second line displays the number of nodes and number of edges, respectively. This sample graph contains three nodes labeled "a", "b", and "c". The rest of the graph contains two lines for each node. The first line for a node contains the node label. After the declaration of the node label, the out-edges of that node in the graph are provided. For instance, "a" is linked to nodes 1 and 2, which are labeled "b" and "c", while the node labeled "b" has no outgoing edges. Observe that node labeled "c" has an outgoing edge to itself. Indeed, self-loops are allowed. Node index starts from 0. """ # Copyright (C) 2008-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx from networkx.utils import is_string_like,open_file __author__ = '\n'.join(['Willem Ligtenberg (w.p.a.ligtenberg@tue.nl)', 'Aric Hagberg (aric.hagberg@gmail.com)']) @open_file(1,mode='w') def write_p2g(G, path, encoding = 'utf-8'): """Write NetworkX graph in p2g format. Notes ----- This format is meant to be used with directed graphs with possible self loops. """ path.write(("%s\n"%G.name).encode(encoding)) path.write(("%s %s\n"%(G.order(),G.size())).encode(encoding)) nodes = G.nodes() # make dictionary mapping nodes to integers nodenumber=dict(zip(nodes,range(len(nodes)))) for n in nodes: path.write(("%s\n"%n).encode(encoding)) for nbr in G.neighbors(n): path.write(("%s "%nodenumber[nbr]).encode(encoding)) path.write("\n".encode(encoding)) @open_file(0,mode='r') def read_p2g(path, encoding='utf-8'): """Read graph in p2g format from path. Returns ------- MultiDiGraph Notes ----- If you want a DiGraph (with no self loops allowed and no edge data) use D=networkx.DiGraph(read_p2g(path)) """ lines = (line.decode(encoding) for line in path) G=parse_p2g(lines) return G def parse_p2g(lines): """Parse p2g format graph from string or iterable. Returns ------- MultiDiGraph """ description = next(lines).strip() # are multiedges (parallel edges) allowed? 
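    # The p2g format gives edges no identity of their own, so parse into a
    # MultiDiGraph; callers that do not want parallel edges can collapse the
    # result with networkx.DiGraph(...) as noted in read_p2g above.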
G=networkx.MultiDiGraph(name=description,selfloops=True) nnodes,nedges=map(int,next(lines).split()) nodelabel={} nbrs={} # loop over the nodes keeping track of node labels and out neighbors # defer adding edges until all node labels are known for i in range(nnodes): n=next(lines).strip() nodelabel[i]=n G.add_node(n) nbrs[n]=map(int,next(lines).split()) # now we know all of the node labels so we can add the edges # with the correct labels for n in G: for nbr in nbrs[n]: G.add_edge(n,nodelabel[nbr]) return G networkx-1.8.1/networkx/readwrite/adjlist.py0000664000175000017500000002045212177456333021153 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ************** Adjacency List ************** Read and write NetworkX graphs as adjacency lists. Adjacency list format is useful for graphs without data associated with nodes or edges and for nodes that can be meaningfully represented as strings. Format ------ The adjacency list format consists of lines with node labels. The first label in a line is the source node. Further labels in the line are considered target nodes and are added to the graph along with an edge between the source node and target node. The graph with edges a-b, a-c, d-e can be represented as the following adjacency list (anything following the # in a line is a comment):: a b c # source target target d e """ __author__ = '\n'.join(['Aric Hagberg ', 'Dan Schult ', 'Loïc Séguin-C. ']) # Copyright (C) 2004-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['generate_adjlist', 'write_adjlist', 'parse_adjlist', 'read_adjlist'] from networkx.utils import make_str, open_file import networkx as nx def generate_adjlist(G, delimiter = ' '): """Generate a single line of the graph G in adjacency list format. Parameters ---------- G : NetworkX graph delimiter : string, optional Separator for node labels Returns ------- lines : string Lines of data in adjlist format. Examples -------- >>> G = nx.lollipop_graph(4, 3) >>> for line in nx.generate_adjlist(G): ... print(line) 0 1 2 3 1 2 3 2 3 3 4 4 5 5 6 6 See Also -------- write_adjlist, read_adjlist """ directed=G.is_directed() seen=set() for s,nbrs in G.adjacency_iter(): line = make_str(s)+delimiter for t,data in nbrs.items(): if not directed and t in seen: continue if G.is_multigraph(): for d in data.values(): line += make_str(t) + delimiter else: line += make_str(t) + delimiter if not directed: seen.add(s) yield line[:-len(delimiter)] @open_file(1,mode='wb') def write_adjlist(G, path, comments="#", delimiter=' ', encoding = 'utf-8'): """Write graph G in single-line adjacency-list format to path. Parameters ---------- G : NetworkX graph path : string or file Filename or file handle for data output. Filenames ending in .gz or .bz2 will be compressed. comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels encoding : string, optional Text encoding. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_adjlist(G,"test.adjlist") The path can be a filehandle or a string with the name of the file. If a filehandle is provided, it has to be opened in 'wb' mode. >>> fh=open("test.adjlist",'wb') >>> nx.write_adjlist(G, fh) Notes ----- This format does not store graph, node, or edge data. 
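    For example, the weight attribute below is not written to the file:

    >>> G = nx.Graph()
    >>> G.add_edge(1, 2, weight=7)
    >>> nx.write_adjlist(G, "test.adjlist")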
See Also -------- read_adjlist, generate_adjlist """ import sys import time pargs=comments + " ".join(sys.argv) + '\n' header = (pargs + comments + " GMT %s\n" % (time.asctime(time.gmtime())) + comments + " %s\n" % (G.name)) path.write(header.encode(encoding)) for line in generate_adjlist(G, delimiter): line+='\n' path.write(line.encode(encoding)) def parse_adjlist(lines, comments = '#', delimiter = None, create_using = None, nodetype = None): """Parse lines of a graph adjacency list representation. Parameters ---------- lines : list or iterator of strings Input data in adjlist format create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. nodetype : Python type, optional Convert nodes to this type. comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels. The default is whitespace. create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. Returns ------- G: NetworkX graph The graph corresponding to the lines in adjacency list format. Examples -------- >>> lines = ['1 2 5', ... '2 3 4', ... '3 5', ... '4', ... '5'] >>> G = nx.parse_adjlist(lines, nodetype = int) >>> G.nodes() [1, 2, 3, 4, 5] >>> G.edges() [(1, 2), (1, 5), (2, 3), (2, 4), (3, 5)] See Also -------- read_adjlist """ if create_using is None: G=nx.Graph() else: try: G=create_using G.clear() except: raise TypeError("Input graph is not a NetworkX graph type") for line in lines: p=line.find(comments) if p>=0: line = line[:p] if not len(line): continue vlist=line.strip().split(delimiter) u=vlist.pop(0) # convert types if nodetype is not None: try: u=nodetype(u) except: raise TypeError("Failed to convert node (%s) to type %s"\ %(u,nodetype)) G.add_node(u) if nodetype is not None: try: vlist=map(nodetype,vlist) except: raise TypeError("Failed to convert nodes (%s) to type %s"\ %(','.join(vlist),nodetype)) G.add_edges_from([(u, v) for v in vlist]) return G @open_file(0,mode='rb') def read_adjlist(path, comments="#", delimiter=None, create_using=None, nodetype=None, encoding = 'utf-8'): """Read graph in adjacency list format from path. Parameters ---------- path : string or file Filename or file handle to read. Filenames ending in .gz or .bz2 will be uncompressed. create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. nodetype : Python type, optional Convert nodes to this type. comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels. The default is whitespace. create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. Returns ------- G: NetworkX graph The graph corresponding to the lines in adjacency list format. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_adjlist(G, "test.adjlist") >>> G=nx.read_adjlist("test.adjlist") The path can be a filehandle or a string with the name of the file. If a filehandle is provided, it has to be opened in 'rb' mode. >>> fh=open("test.adjlist", 'rb') >>> G=nx.read_adjlist(fh) Filenames ending in .gz or .bz2 will be compressed. >>> nx.write_adjlist(G,"test.adjlist.gz") >>> G=nx.read_adjlist("test.adjlist.gz") The optional nodetype is a function to convert node strings to nodetype. For example >>> G=nx.read_adjlist("test.adjlist", nodetype=int) will attempt to convert all nodes to integer type. Since nodes must be hashable, the function nodetype must return hashable types (e.g. int, float, str, frozenset - or tuples of those, etc.) 
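    If a node string cannot be converted, a TypeError is raised.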
The optional create_using parameter is a NetworkX graph container. The default is Graph(), an undirected graph. To read the data as a directed graph use >>> G=nx.read_adjlist("test.adjlist", create_using=nx.DiGraph()) Notes ----- This format does not store graph or node data. See Also -------- write_adjlist """ lines = (line.decode(encoding) for line in path) return parse_adjlist(lines, comments = comments, delimiter = delimiter, create_using = create_using, nodetype = nodetype) # fixture for nose tests def teardown_module(module): import os os.unlink('test.adjlist') os.unlink('test.adjlist.gz') networkx-1.8.1/networkx/readwrite/__init__.py0000664000175000017500000000106512177456333021257 0ustar aricaric00000000000000""" A package for reading and writing graphs in various formats. """ from networkx.readwrite.adjlist import * from networkx.readwrite.multiline_adjlist import * from networkx.readwrite.edgelist import * from networkx.readwrite.gpickle import * from networkx.readwrite.pajek import * from networkx.readwrite.leda import * from networkx.readwrite.sparsegraph6 import * from networkx.readwrite.nx_yaml import * from networkx.readwrite.gml import * from networkx.readwrite.graphml import * from networkx.readwrite.gexf import * from networkx.readwrite.nx_shp import * networkx-1.8.1/networkx/readwrite/leda.py0000664000175000017500000000543712177456333020434 0ustar aricaric00000000000000""" Read graphs in LEDA format. LEDA is a C++ class library for efficient data types and algorithms. Format ------ See http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html """ # Original author: D. Eppstein, UC Irvine, August 12, 2003. # The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['read_leda', 'parse_leda'] import networkx as nx from networkx.exception import NetworkXError from networkx.utils import open_file, is_string_like @open_file(0,mode='rb') def read_leda(path, encoding='UTF-8'): """Read graph in LEDA format from path. Parameters ---------- path : file or string File or filename to read. Filenames ending in .gz or .bz2 will be uncompressed. Returns ------- G : NetworkX graph Examples -------- G=nx.read_leda('file.leda') References ---------- .. [1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html """ lines=(line.decode(encoding) for line in path) G=parse_leda(lines) return G def parse_leda(lines): """Read graph in LEDA format from string or iterable. Parameters ---------- lines : string or iterable Data in LEDA format. Returns ------- G : NetworkX graph Examples -------- G=nx.parse_leda(string) References ---------- .. 
[1] http://www.algorithmic-solutions.info/leda_guide/graphs/leda_native_graph_fileformat.html """ if is_string_like(lines): lines=iter(lines.split('\n')) lines = iter([line.rstrip('\n') for line in lines \ if not (line.startswith('#') or line.startswith('\n') or line=='')]) for i in range(3): next(lines) # Graph du = int(next(lines)) # -1=directed, -2=undirected if du==-1: G = nx.DiGraph() else: G = nx.Graph() # Nodes n =int(next(lines)) # number of nodes node={} for i in range(1,n+1): # LEDA counts from 1 to n symbol=next(lines).rstrip().strip('|{}| ') if symbol=="": symbol=str(i) # use int if no label - could be trouble node[i]=symbol G.add_nodes_from([s for i,s in node.items()]) # Edges m = int(next(lines)) # number of edges for i in range(m): try: s,t,reversal,label=next(lines).split() except: raise NetworkXError('Too few fields in LEDA.GRAPH edge %d'%(i+1)) # BEWARE: no handling of reversal edges G.add_edge(node[int(s)],node[int(t)],label=label[2:-2]) return G networkx-1.8.1/networkx/readwrite/graphml.py0000664000175000017500000005141612177456333021157 0ustar aricaric00000000000000""" ******* GraphML ******* Read and write graphs in GraphML format. This implementation does not support mixed graphs (directed and unidirected edges together), hyperedges, nested graphs, or ports. "GraphML is a comprehensive and easy-to-use file format for graphs. It consists of a language core to describe the structural properties of a graph and a flexible extension mechanism to add application-specific data. Its main features include support of * directed, undirected, and mixed graphs, * hypergraphs, * hierarchical graphs, * graphical representations, * references to external data, * application-specific attribute data, and * light-weight parsers. Unlike many other file formats for graphs, GraphML does not use a custom syntax. Instead, it is based on XML and hence ideally suited as a common denominator for all kinds of services generating, archiving, or processing graphs." http://graphml.graphdrawing.org/ Format ------ GraphML is an XML format. See http://graphml.graphdrawing.org/specification.html for the specification and http://graphml.graphdrawing.org/primer/graphml-primer.html for examples. """ __author__ = """\n""".join(['Salim Fadhley', 'Aric Hagberg (hagberg@lanl.gov)' ]) __all__ = ['write_graphml', 'read_graphml', 'generate_graphml', 'parse_graphml', 'GraphMLWriter', 'GraphMLReader'] import networkx as nx from networkx.utils import open_file, make_str import warnings try: from xml.etree.cElementTree import Element, ElementTree, tostring, fromstring except ImportError: try: from xml.etree.ElementTree import Element, ElementTree, tostring, fromstring except ImportError: pass @open_file(1,mode='wb') def write_graphml(G, path, encoding='utf-8',prettyprint=True): """Write G in GraphML XML format to path Parameters ---------- G : graph A networkx graph path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be compressed. encoding : string (optional) Encoding for text data. prettyprint : bool (optional) If True use line breaks and indenting in output XML. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_graphml(G, "test.graphml") Notes ----- This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. 
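    The file written in the example above can be read back, for instance with:

    >>> H = nx.read_graphml("test.graphml")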
""" writer = GraphMLWriter(encoding=encoding,prettyprint=prettyprint) writer.add_graph_element(G) writer.dump(path) def generate_graphml(G, encoding='utf-8',prettyprint=True): """Generate GraphML lines for G Parameters ---------- G : graph A networkx graph encoding : string (optional) Encoding for text data. prettyprint : bool (optional) If True use line breaks and indenting in output XML. Examples -------- >>> G=nx.path_graph(4) >>> linefeed=chr(10) # linefeed=\n >>> s=linefeed.join(nx.generate_graphml(G)) # doctest: +SKIP >>> for line in nx.generate_graphml(G): # doctest: +SKIP ... print(line) Notes ----- This implementation does not support mixed graphs (directed and unidirected edges together) hyperedges, nested graphs, or ports. """ writer = GraphMLWriter(encoding=encoding,prettyprint=prettyprint) writer.add_graph_element(G) for line in str(writer).splitlines(): yield line @open_file(0,mode='rb') def read_graphml(path,node_type=str): """Read graph in GraphML format from path. Parameters ---------- path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be compressed. node_type: Python type (default: str) Convert node ids to this type Returns ------- graph: NetworkX graph If no parallel edges are found a Graph or DiGraph is returned. Otherwise a MultiGraph or MultiDiGraph is returned. Notes ----- This implementation does not support mixed graphs (directed and unidirected edges together), hypergraphs, nested graphs, or ports. For multigraphs the GraphML edge "id" will be used as the edge key. If not specified then they "key" attribute will be used. If there is no "key" attribute a default NetworkX multigraph edge key will be provided. Files with the yEd "yfiles" extension will can be read but the graphics information is discarded. yEd compressed files ("file.graphmlz" extension) can be read by renaming the file to "file.graphml.gz". """ reader = GraphMLReader(node_type=node_type) # need to check for multiple graphs glist=list(reader(path=path)) return glist[0] def parse_graphml(graphml_string,node_type=str): """Read graph in GraphML format from string. Parameters ---------- graphml_string : string String containing graphml information (e.g., contents of a graphml file). node_type: Python type (default: str) Convert node ids to this type Returns ------- graph: NetworkX graph If no parallel edges are found a Graph or DiGraph is returned. Otherwise a MultiGraph or MultiDiGraph is returned. Examples -------- >>> G=nx.path_graph(4) >>> linefeed=chr(10) # linefeed=\n >>> s=linefeed.join(nx.generate_graphml(G)) >>> H=nx.parse_graphml(s) Notes ----- This implementation does not support mixed graphs (directed and unidirected edges together), hypergraphs, nested graphs, or ports. For multigraphs the GraphML edge "id" will be used as the edge key. If not specified then they "key" attribute will be used. If there is no "key" attribute a default NetworkX multigraph edge key will be provided. """ reader = GraphMLReader(node_type=node_type) # need to check for multiple graphs glist=list(reader(string=graphml_string)) return glist[0] class GraphML(object): NS_GRAPHML = "http://graphml.graphdrawing.org/xmlns" NS_XSI = "http://www.w3.org/2001/XMLSchema-instance" #xmlns:y="http://www.yworks.com/xml/graphml" NS_Y = "http://www.yworks.com/xml/graphml" SCHEMALOCATION = \ ' '.join(['http://graphml.graphdrawing.org/xmlns', 'http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd']) try: chr(12345) # Fails on Py!=3. 
unicode = str # Py3k's str is our unicode type long = int # Py3K's int is our long type except ValueError: # Python 2.x pass types=[(int,"integer"), # for Gephi GraphML bug (str,"yfiles"),(str,"string"), (unicode,"string"), (int,"int"), (long,"long"), (float,"float"), (float,"double"), (bool, "boolean")] xml_type = dict(types) python_type = dict(reversed(a) for a in types) convert_bool={'true':True,'false':False, 'True': True, 'False': False} class GraphMLWriter(GraphML): def __init__(self, graph=None, encoding="utf-8",prettyprint=True): try: import xml.etree.ElementTree except ImportError: raise ImportError('GraphML writer requires ' 'xml.elementtree.ElementTree') self.prettyprint=prettyprint self.encoding = encoding self.xml = Element("graphml", {'xmlns':self.NS_GRAPHML, 'xmlns:xsi':self.NS_XSI, 'xsi:schemaLocation':self.SCHEMALOCATION} ) self.keys={} if graph is not None: self.add_graph_element(graph) def __str__(self): if self.prettyprint: self.indent(self.xml) s=tostring(self.xml).decode(self.encoding) return s def get_key(self, name, attr_type, scope, default): keys_key = (name, attr_type, scope) try: return self.keys[keys_key] except KeyError: new_id = "d%i" % len(list(self.keys)) self.keys[keys_key] = new_id key_kwargs = {"id":new_id, "for":scope, "attr.name":name, "attr.type":attr_type} key_element=Element("key",**key_kwargs) # add subelement for data default value if present if default is not None: default_element=Element("default") default_element.text=make_str(default) key_element.append(default_element) self.xml.insert(0,key_element) return new_id def add_data(self, name, element_type, value, scope="all", default=None): """ Make a data element for an edge or a node. Keep a log of the type in the keys table. """ if element_type not in self.xml_type: raise nx.NetworkXError('GraphML writer does not support ' '%s as data values.'%element_type) key_id = self.get_key(name, self.xml_type[element_type], scope, default) data_element = Element("data", key=key_id) data_element.text = make_str(value) return data_element def add_attributes(self, scope, xml_obj, data, default): """Appends attributes to edges or nodes. """ for k,v in data.items(): default_value=default.get(k) obj=self.add_data(make_str(k), type(v), make_str(v), scope=scope, default=default_value) xml_obj.append(obj) def add_nodes(self, G, graph_element): for node,data in G.nodes_iter(data=True): node_element = Element("node", id = make_str(node)) default=G.graph.get('node_default',{}) self.add_attributes("node", node_element, data, default) graph_element.append(node_element) def add_edges(self, G, graph_element): if G.is_multigraph(): for u,v,key,data in G.edges_iter(data=True,keys=True): edge_element = Element("edge",source=make_str(u), target=make_str(v)) default=G.graph.get('edge_default',{}) self.add_attributes("edge", edge_element, data, default) self.add_attributes("edge", edge_element, {'key':key}, default) graph_element.append(edge_element) else: for u,v,data in G.edges_iter(data=True): edge_element = Element("edge",source=make_str(u), target=make_str(v)) default=G.graph.get('edge_default',{}) self.add_attributes("edge", edge_element, data, default) graph_element.append(edge_element) def add_graph_element(self, G): """ Serialize graph G in GraphML to the stream. 
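        A <graph> element is created with the appropriate edgedefault,
        graph-level attributes (other than node_default/edge_default) are
        written as <data> children, and then the node and edge elements
        are appended.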
""" if G.is_directed(): default_edge_type='directed' else: default_edge_type='undirected' graphid=G.graph.pop('id',None) if graphid is None: graph_element = Element("graph", edgedefault = default_edge_type) else: graph_element = Element("graph", edgedefault = default_edge_type, id=graphid) default={} data=dict((k,v) for (k,v) in G.graph.items() if k not in ['node_default','edge_default']) self.add_attributes("graph", graph_element, data, default) self.add_nodes(G,graph_element) self.add_edges(G,graph_element) self.xml.append(graph_element) def add_graphs(self, graph_list): """ Add many graphs to this GraphML document. """ for G in graph_list: self.add_graph_element(G) def dump(self, stream): if self.prettyprint: self.indent(self.xml) document = ElementTree(self.xml) header=''%self.encoding stream.write(header.encode(self.encoding)) document.write(stream, encoding=self.encoding) def indent(self, elem, level=0): # in-place prettyprint formatter i = "\n" + level*" " if len(elem): if not elem.text or not elem.text.strip(): elem.text = i + " " if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: self.indent(elem, level+1) if not elem.tail or not elem.tail.strip(): elem.tail = i else: if level and (not elem.tail or not elem.tail.strip()): elem.tail = i class GraphMLReader(GraphML): """Read a GraphML document. Produces NetworkX graph objects. """ def __init__(self, node_type=str): try: import xml.etree.ElementTree except ImportError: raise ImportError('GraphML reader requires ' 'xml.elementtree.ElementTree') self.node_type=node_type self.multigraph=False # assume multigraph and test for parallel edges def __call__(self, path=None, string=None): if path is not None: self.xml = ElementTree(file=path) elif string is not None: self.xml = fromstring(string) else: raise ValueError("Must specify either 'path' or 'string' as kwarg.") (keys,defaults) = self.find_graphml_keys(self.xml) for g in self.xml.findall("{%s}graph" % self.NS_GRAPHML): yield self.make_graph(g, keys, defaults) def make_graph(self, graph_xml, graphml_keys, defaults): # set default graph type edgedefault = graph_xml.get("edgedefault", None) if edgedefault=='directed': G=nx.MultiDiGraph() else: G=nx.MultiGraph() # set defaults for graph attributes G.graph['node_default']={} G.graph['edge_default']={} for key_id,value in defaults.items(): key_for=graphml_keys[key_id]['for'] name=graphml_keys[key_id]['name'] python_type=graphml_keys[key_id]['type'] if key_for=='node': G.graph['node_default'].update({name:python_type(value)}) if key_for=='edge': G.graph['edge_default'].update({name:python_type(value)}) # hyperedges are not supported hyperedge=graph_xml.find("{%s}hyperedge" % self.NS_GRAPHML) if hyperedge is not None: raise nx.NetworkXError("GraphML reader does not support hyperedges") # add nodes for node_xml in graph_xml.findall("{%s}node" % self.NS_GRAPHML): self.add_node(G, node_xml, graphml_keys) # add edges for edge_xml in graph_xml.findall("{%s}edge" % self.NS_GRAPHML): self.add_edge(G, edge_xml, graphml_keys) # add graph data data = self.decode_data_elements(graphml_keys, graph_xml) G.graph.update(data) # switch to Graph or DiGraph if no parallel edges were found. if not self.multigraph: if G.is_directed(): return nx.DiGraph(G) else: return nx.Graph(G) else: return G def add_node(self, G, node_xml, graphml_keys): """Add a node to the graph. 
""" # warn on finding unsupported ports tag ports=node_xml.find("{%s}port" % self.NS_GRAPHML) if ports is not None: warnings.warn("GraphML port tag not supported.") # find the node by id and cast it to the appropriate type node_id = self.node_type(node_xml.get("id")) # get data/attributes for node data = self.decode_data_elements(graphml_keys, node_xml) G.add_node(node_id, data) def add_edge(self, G, edge_element, graphml_keys): """Add an edge to the graph. """ # warn on finding unsupported ports tag ports=edge_element.find("{%s}port" % self.NS_GRAPHML) if ports is not None: warnings.warn("GraphML port tag not supported.") # raise error if we find mixed directed and undirected edges directed = edge_element.get("directed") if G.is_directed() and directed=='false': raise nx.NetworkXError(\ "directed=false edge found in directed graph.") if (not G.is_directed()) and directed=='true': raise nx.NetworkXError(\ "directed=true edge found in undirected graph.") source = self.node_type(edge_element.get("source")) target = self.node_type(edge_element.get("target")) data = self.decode_data_elements(graphml_keys, edge_element) # GraphML stores edge ids as an attribute # NetworkX uses them as keys in multigraphs too if no key # attribute is specified edge_id = edge_element.get("id") if edge_id: data["id"] = edge_id if G.has_edge(source,target): # mark this as a multigraph self.multigraph=True if edge_id is None: # no id specified, try using 'key' attribute as id edge_id=data.pop('key',None) G.add_edge(source, target, key=edge_id, **data) def decode_data_elements(self, graphml_keys, obj_xml): """Use the key information to decode the data XML if present.""" data = {} for data_element in obj_xml.findall("{%s}data" % self.NS_GRAPHML): key = data_element.get("key") try: data_name=graphml_keys[key]['name'] data_type=graphml_keys[key]['type'] except KeyError: raise nx.NetworkXError("Bad GraphML data: no key %s"%key) text=data_element.text # assume anything with subelements is a yfiles extension if text is not None and len(list(data_element))==0: if data_type==bool: data[data_name] = self.convert_bool[text] else: data[data_name] = data_type(text) elif len(list(data_element)) > 0: # Assume yfiles as subelements, try to extract node_label node_label = None for node_type in ['ShapeNode', 'SVGNode', 'ImageNode']: geometry = data_element.find("{%s}%s/{%s}Geometry" % (self.NS_Y, node_type, self.NS_Y)) if geometry is not None: data['x'] = geometry.get('x') data['y'] = geometry.get('y') if node_label is None: node_label = data_element.find("{%s}%s/{%s}NodeLabel" % (self.NS_Y, node_type, self.NS_Y)) if node_label is not None: data['label'] = node_label.text # check all the diffrent types of edges avaivable in yEd. for e in ['PolyLineEdge', 'SplineEdge', 'QuadCurveEdge', 'BezierEdge', 'ArcEdge']: edge_label = data_element.find("{%s}%s/{%s}EdgeLabel"% (self.NS_Y, e, (self.NS_Y))) if edge_label is not None: break if edge_label is not None: data['label'] = edge_label.text return data def find_graphml_keys(self, graph_element): """Extracts all the keys and key defaults from the xml. """ graphml_keys = {} graphml_key_defaults = {} for k in graph_element.findall("{%s}key" % self.NS_GRAPHML): attr_id = k.get("id") attr_type=k.get('attr.type') attr_name=k.get("attr.name") yfiles_type=k.get("yfiles.type") if yfiles_type is not None: attr_name = yfiles_type attr_type = 'yfiles' if attr_type is None: attr_type = "string" warnings.warn("No key type for id %s. 
Using string"%attr_id) if attr_name is None: raise nx.NetworkXError("Unknown key for id %s in file."%attr_id) graphml_keys[attr_id] = { "name":attr_name, "type":self.python_type[attr_type], "for":k.get("for")} # check for "default" subelement of key element default=k.find("{%s}default" % self.NS_GRAPHML) if default is not None: graphml_key_defaults[attr_id]=default.text return graphml_keys,graphml_key_defaults # fixture for nose tests def setup_module(module): from nose import SkipTest try: import xml.etree.ElementTree except: raise SkipTest("xml.etree.ElementTree not available") # fixture for nose tests def teardown_module(module): import os try: os.unlink('test.graphml') except: pass networkx-1.8.1/networkx/readwrite/multiline_adjlist.py0000664000175000017500000002734512177456333023245 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ************************* Multi-line Adjacency List ************************* Read and write NetworkX graphs as multi-line adjacency lists. The multi-line adjacency list format is useful for graphs with nodes that can be meaningfully represented as strings. With this format simple edge data can be stored but node or graph data is not. Format ------ The first label in a line is the source node label followed by the node degree d. The next d lines are target node labels and optional edge data. That pattern repeats for all nodes in the graph. The graph with edges a-b, a-c, d-e can be represented as the following adjacency list (anything following the # in a line is a comment):: # example.multiline-adjlist a 2 b c d 1 e """ __author__ = '\n'.join(['Aric Hagberg ', 'Dan Schult ', 'Loïc Séguin-C. ']) # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['generate_multiline_adjlist', 'write_multiline_adjlist', 'parse_multiline_adjlist', 'read_multiline_adjlist'] from networkx.utils import make_str, open_file import networkx as nx def generate_multiline_adjlist(G, delimiter = ' '): """Generate a single line of the graph G in multiline adjacency list format. Parameters ---------- G : NetworkX graph delimiter : string, optional Separator for node labels Returns ------- lines : string Lines of data in multiline adjlist format. Examples -------- >>> G = nx.lollipop_graph(4, 3) >>> for line in nx.generate_multiline_adjlist(G): ... 
print(line) 0 3 1 {} 2 {} 3 {} 1 2 2 {} 3 {} 2 1 3 {} 3 1 4 {} 4 1 5 {} 5 1 6 {} 6 0 See Also -------- write_multiline_adjlist, read_multiline_adjlist """ if G.is_directed(): if G.is_multigraph(): for s,nbrs in G.adjacency_iter(): nbr_edges=[ (u,data) for u,datadict in nbrs.items() for key,data in datadict.items()] deg=len(nbr_edges) yield make_str(s)+delimiter+"%i"%(deg) for u,d in nbr_edges: if d is None: yield make_str(u) else: yield make_str(u)+delimiter+make_str(d) else: # directed single edges for s,nbrs in G.adjacency_iter(): deg=len(nbrs) yield make_str(s)+delimiter+"%i"%(deg) for u,d in nbrs.items(): if d is None: yield make_str(u) else: yield make_str(u)+delimiter+make_str(d) else: # undirected if G.is_multigraph(): seen=set() # helper dict used to avoid duplicate edges for s,nbrs in G.adjacency_iter(): nbr_edges=[ (u,data) for u,datadict in nbrs.items() if u not in seen for key,data in datadict.items()] deg=len(nbr_edges) yield make_str(s)+delimiter+"%i"%(deg) for u,d in nbr_edges: if d is None: yield make_str(u) else: yield make_str(u)+delimiter+make_str(d) seen.add(s) else: # undirected single edges seen=set() # helper dict used to avoid duplicate edges for s,nbrs in G.adjacency_iter(): nbr_edges=[ (u,d) for u,d in nbrs.items() if u not in seen] deg=len(nbr_edges) yield make_str(s)+delimiter+"%i"%(deg) for u,d in nbr_edges: if d is None: yield make_str(u) else: yield make_str(u)+delimiter+make_str(d) seen.add(s) @open_file(1,mode='wb') def write_multiline_adjlist(G, path, delimiter=' ', comments='#', encoding = 'utf-8'): """ Write the graph G in multiline adjacency list format to path Parameters ---------- G : NetworkX graph comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels encoding : string, optional Text encoding. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_multiline_adjlist(G,"test.adjlist") The path can be a file handle or a string with the name of the file. If a file handle is provided, it has to be opened in 'wb' mode. >>> fh=open("test.adjlist",'wb') >>> nx.write_multiline_adjlist(G,fh) Filenames ending in .gz or .bz2 will be compressed. >>> nx.write_multiline_adjlist(G,"test.adjlist.gz") See Also -------- read_multiline_adjlist """ import sys import time pargs=comments+" ".join(sys.argv) header = ("%s\n" % (pargs) + comments + " GMT %s\n" % (time.asctime(time.gmtime())) + comments + " %s\n" % (G.name)) path.write(header.encode(encoding)) for multiline in generate_multiline_adjlist(G, delimiter): multiline+='\n' path.write(multiline.encode(encoding)) def parse_multiline_adjlist(lines, comments = '#', delimiter = None, create_using = None, nodetype = None, edgetype = None): """Parse lines of a multiline adjacency list representation of a graph. Parameters ---------- lines : list or iterator of strings Input data in multiline adjlist format create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. nodetype : Python type, optional Convert nodes to this type. comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels. The default is whitespace. create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. Returns ------- G: NetworkX graph The graph corresponding to the lines in multiline adjacency list format. Examples -------- >>> lines = ['1 2', ... "2 {'weight':3, 'name': 'Frodo'}", ... "3 {}", ... "2 1", ... 
"5 {'weight':6, 'name': 'Saruman'}"] >>> G = nx.parse_multiline_adjlist(iter(lines), nodetype = int) >>> G.nodes() [1, 2, 3, 5] """ from ast import literal_eval if create_using is None: G=nx.Graph() else: try: G=create_using G.clear() except: raise TypeError("Input graph is not a networkx graph type") for line in lines: p=line.find(comments) if p>=0: line = line[:p] if not line: continue try: (u,deg)=line.strip().split(delimiter) deg=int(deg) except: raise TypeError("Failed to read node and degree on line (%s)"%line) if nodetype is not None: try: u=nodetype(u) except: raise TypeError("Failed to convert node (%s) to type %s"\ %(u,nodetype)) G.add_node(u) for i in range(deg): while True: try: line = next(lines) except StopIteration: msg = "Failed to find neighbor for node (%s)" % (u,) raise TypeError(msg) p=line.find(comments) if p>=0: line = line[:p] if line: break vlist=line.strip().split(delimiter) numb=len(vlist) if numb<1: continue # isolated node v=vlist.pop(0) data=''.join(vlist) if nodetype is not None: try: v=nodetype(v) except: raise TypeError( "Failed to convert node (%s) to type %s"\ %(v,nodetype)) if edgetype is not None: try: edgedata={'weight':edgetype(data)} except: raise TypeError( "Failed to convert edge data (%s) to type %s"\ %(data, edgetype)) else: try: # try to evaluate edgedata=literal_eval(data) except: edgedata={} G.add_edge(u,v,attr_dict=edgedata) return G @open_file(0,mode='rb') def read_multiline_adjlist(path, comments="#", delimiter=None, create_using=None, nodetype=None, edgetype=None, encoding = 'utf-8'): """Read graph in multi-line adjacency list format from path. Parameters ---------- path : string or file Filename or file handle to read. Filenames ending in .gz or .bz2 will be uncompressed. create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. nodetype : Python type, optional Convert nodes to this type. edgetype : Python type, optional Convert edge data to this type. comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels. The default is whitespace. create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. Returns ------- G: NetworkX graph Examples -------- >>> G=nx.path_graph(4) >>> nx.write_multiline_adjlist(G,"test.adjlist") >>> G=nx.read_multiline_adjlist("test.adjlist") The path can be a file or a string with the name of the file. If a file s provided, it has to be opened in 'rb' mode. >>> fh=open("test.adjlist", 'rb') >>> G=nx.read_multiline_adjlist(fh) Filenames ending in .gz or .bz2 will be compressed. >>> nx.write_multiline_adjlist(G,"test.adjlist.gz") >>> G=nx.read_multiline_adjlist("test.adjlist.gz") The optional nodetype is a function to convert node strings to nodetype. For example >>> G=nx.read_multiline_adjlist("test.adjlist", nodetype=int) will attempt to convert all nodes to integer type. The optional edgetype is a function to convert edge data strings to edgetype. >>> G=nx.read_multiline_adjlist("test.adjlist") The optional create_using parameter is a NetworkX graph container. The default is Graph(), an undirected graph. To read the data as a directed graph use >>> G=nx.read_multiline_adjlist("test.adjlist", create_using=nx.DiGraph()) Notes ----- This format does not store graph, node, or edge data. 
See Also -------- write_multiline_adjlist """ lines = (line.decode(encoding) for line in path) return parse_multiline_adjlist(lines, comments = comments, delimiter = delimiter, create_using = create_using, nodetype = nodetype, edgetype = edgetype) # fixture for nose tests def teardown_module(module): import os os.unlink('test.adjlist') os.unlink('test.adjlist.gz') networkx-1.8.1/networkx/readwrite/gml.py0000664000175000017500000002711612177456333020304 0ustar aricaric00000000000000""" Read graphs in GML format. "GML, the G>raph Modelling Language, is our proposal for a portable file format for graphs. GML's key features are portability, simple syntax, extensibility and flexibility. A GML file consists of a hierarchical key-value lists. Graphs can be annotated with arbitrary data structures. The idea for a common file format was born at the GD'95; this proposal is the outcome of many discussions. GML is the standard file format in the Graphlet graph editor system. It has been overtaken and adapted by several other systems for drawing graphs." See http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html Requires pyparsing: http://pyparsing.wikispaces.com/ Format ------ See http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html for format specification. Example graphs in GML format: http://www-personal.umich.edu/~mejn/netdata/ """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2008-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['read_gml', 'parse_gml', 'generate_gml', 'write_gml'] import networkx as nx from networkx.exception import NetworkXError from networkx.utils import is_string_like, open_file @open_file(0,mode='rb') def read_gml(path,encoding='UTF-8',relabel=False): """Read graph in GML format from path. Parameters ---------- path : filename or filehandle The filename or filehandle to read from. encoding : string, optional Text encoding. relabel : bool, optional If True use the GML node label attribute for node names otherwise use the node id. Returns ------- G : MultiGraph or MultiDiGraph Raises ------ ImportError If the pyparsing module is not available. See Also -------- write_gml, parse_gml Notes ----- Requires pyparsing: http://pyparsing.wikispaces.com/ References ---------- GML specification: http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html Examples -------- >>> G=nx.path_graph(4) >>> nx.write_gml(G,'test.gml') >>> H=nx.read_gml('test.gml') """ lines=(line.decode(encoding) for line in path) G=parse_gml(lines,relabel=relabel) return G def parse_gml(lines, relabel=True): """Parse GML graph from a string or iterable. Parameters ---------- lines : string or iterable Data in GML format. relabel : bool, optional If True use the GML node label attribute for node names otherwise use the node id. Returns ------- G : MultiGraph or MultiDiGraph Raises ------ ImportError If the pyparsing module is not available. See Also -------- write_gml, read_gml Notes ----- This stores nested GML attributes as dictionaries in the NetworkX graph, node, and edge attribute structures. 
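    For instance, a nested block such as ``graphics [ x 10 y 20 ]`` inside a
    node is stored as the dictionary {'x': 10, 'y': 20} under the 'graphics'
    key of that node's attributes.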
Requires pyparsing: http://pyparsing.wikispaces.com/ References ---------- GML specification: http://www.infosun.fim.uni-passau.de/Graphlet/GML/gml-tr.html """ try: from pyparsing import ParseException except ImportError: try: from matplotlib.pyparsing import ParseException except: raise ImportError('Import Error: not able to import pyparsing:', 'http://pyparsing.wikispaces.com/') try: data = "".join(lines) gml = pyparse_gml() tokens =gml.parseString(data) except ParseException as err: print((err.line)) print((" "*(err.column-1) + "^")) print(err) raise # function to recursively make dicts of key/value pairs def wrap(tok): listtype=type(tok) result={} for k,v in tok: if type(v)==listtype: result[str(k)]=wrap(v) else: result[str(k)]=v return result # Set flag multigraph=False # but assume multigraphs to start if tokens.directed==1: G=nx.MultiDiGraph() else: G=nx.MultiGraph() for k,v in tokens.asList(): if k=="node": vdict=wrap(v) node=vdict['id'] G.add_node(node,attr_dict=vdict) elif k=="edge": vdict=wrap(v) source=vdict.pop('source') target=vdict.pop('target') if G.has_edge(source,target): multigraph=True G.add_edge(source,target,attr_dict=vdict) else: G.graph[k]=v # switch to Graph or DiGraph if no parallel edges were found. if not multigraph: if G.is_directed(): G=nx.DiGraph(G) else: G=nx.Graph(G) if relabel: # relabel, but check for duplicate labels first mapping=[(n,d['label']) for n,d in G.node.items()] x,y=zip(*mapping) if len(set(y))!=len(G): raise NetworkXError('Failed to relabel nodes: ' 'duplicate node labels found. ' 'Use relabel=False.') G=nx.relabel_nodes(G,dict(mapping)) return G def pyparse_gml(): """A pyparsing tokenizer for GML graph format. This is not intended to be called directly. See Also -------- write_gml, read_gml, parse_gml """ try: from pyparsing import \ Literal, CaselessLiteral, Word, Forward,\ ZeroOrMore, Group, Dict, Optional, Combine,\ ParseException, restOfLine, White, alphas, alphanums, nums,\ OneOrMore,quotedString,removeQuotes,dblQuotedString, Regex except ImportError: try: from matplotlib.pyparsing import \ Literal, CaselessLiteral, Word, Forward,\ ZeroOrMore, Group, Dict, Optional, Combine,\ ParseException, restOfLine, White, alphas, alphanums, nums,\ OneOrMore,quotedString,removeQuotes,dblQuotedString, Regex except: raise ImportError('pyparsing not found', 'http://pyparsing.wikispaces.com/') lbrack = Literal("[").suppress() rbrack = Literal("]").suppress() pound = ("#") comment = pound + Optional( restOfLine ) integer = Word(nums+'-').setParseAction(lambda s,l,t:[ int(t[0])]) real = Regex(r"[+-]?\d+\.\d*([eE][+-]?\d+)?").setParseAction( lambda s,l,t:[ float(t[0]) ]) dblQuotedString.setParseAction( removeQuotes ) key = Word(alphas,alphanums+'_') value_atom = (real | integer | Word(alphanums) | dblQuotedString) value = Forward() # to be defined later with << operator keyvalue = Group(key+value) value << (value_atom | Group( lbrack + ZeroOrMore(keyvalue) + rbrack )) node = Group(Literal("node") + lbrack + Group(OneOrMore(keyvalue)) + rbrack) edge = Group(Literal("edge") + lbrack + Group(OneOrMore(keyvalue)) + rbrack) creator = Group(Literal("Creator")+ Optional( restOfLine )) version = Group(Literal("Version")+ Optional( restOfLine )) graphkey = Literal("graph").suppress() graph = Dict (Optional(creator)+Optional(version)+\ graphkey + lbrack + ZeroOrMore( (node|edge|keyvalue) ) + rbrack ) graph.ignore(comment) return graph def generate_gml(G): """Generate a single entry of the graph G in GML format. 
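    Each yielded string is one line of the GML output; joining the lines with
    newline characters gives the complete document, which is what write_gml
    does when writing to a file.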
Parameters ---------- G : NetworkX graph Returns ------- lines: string Lines in GML format. Notes ----- This implementation does not support all Python data types as GML data. Nodes, node attributes, edge attributes, and graph attributes must be either dictionaries or single stings or numbers. If they are not an attempt is made to represent them as strings. For example, a list as edge data G[1][2]['somedata']=[1,2,3], will be represented in the GML file as:: edge [ source 1 target 2 somedata "[1, 2, 3]" ] """ # recursively make dicts into gml brackets def listify(d,indent,indentlevel): result='[ \n' for k,v in d.items(): if type(v)==dict: v=listify(v,indent,indentlevel+1) result += (indentlevel+1)*indent + \ string_item(k,v,indentlevel*indent)+'\n' return result+indentlevel*indent+"]" def string_item(k,v,indent): # try to make a string of the data if type(v)==dict: v=listify(v,indent,2) elif is_string_like(v): v='"%s"'%v elif type(v)==bool: v=int(v) return "%s %s"%(k,v) # check for attributes or assign empty dict if hasattr(G,'graph_attr'): graph_attr=G.graph_attr else: graph_attr={} if hasattr(G,'node_attr'): node_attr=G.node_attr else: node_attr={} indent=2*' ' count=iter(range(len(G))) node_id={} yield "graph [" if G.is_directed(): yield indent+"directed 1" # write graph attributes for k,v in G.graph.items(): if k == 'directed': continue yield indent+string_item(k,v,indent) # write nodes for n in G: yield indent+"node [" # get id or assign number nid=G.node[n].get('id',next(count)) node_id[n]=nid yield 2*indent+"id %s"%nid label=G.node[n].get('label',n) if is_string_like(label): label='"%s"'%label yield 2*indent+'label %s'%label if n in G: for k,v in G.node[n].items(): if k=='id' or k == 'label': continue yield 2*indent+string_item(k,v,indent) yield indent+"]" # write edges for u,v,edgedata in G.edges_iter(data=True): yield indent+"edge [" yield 2*indent+"source %s"%node_id[u] yield 2*indent+"target %s"%node_id[v] for k,v in edgedata.items(): if k=='source': continue if k=='target': continue yield 2*indent+string_item(k,v,indent) yield indent+"]" yield "]" @open_file(1,mode='wb') def write_gml(G, path): """ Write the graph G in GML format to the file or file handle path. Parameters ---------- path : filename or filehandle The filename or filehandle to write. Filenames ending in .gz or .gz2 will be compressed. See Also -------- read_gml, parse_gml Notes ----- GML specifications indicate that the file should only use 7bit ASCII text encoding.iso8859-1 (latin-1). This implementation does not support all Python data types as GML data. Nodes, node attributes, edge attributes, and graph attributes must be either dictionaries or single stings or numbers. If they are not an attempt is made to represent them as strings. For example, a list as edge data G[1][2]['somedata']=[1,2,3], will be represented in the GML file as:: edge [ source 1 target 2 somedata "[1, 2, 3]" ] Examples --------- >>> G=nx.path_graph(4) >>> nx.write_gml(G,"test.gml") Filenames ending in .gz or .bz2 will be compressed. 
>>> nx.write_gml(G,"test.gml.gz") """ for line in generate_gml(G): line+='\n' path.write(line.encode('latin-1')) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import pyparsing except: try: import matplotlib.pyparsing except: raise SkipTest("pyparsing not available") # fixture for nose tests def teardown_module(module): import os os.unlink('test.gml') os.unlink('test.gml.gz') networkx-1.8.1/networkx/readwrite/tests/0000775000175000017500000000000012177457361020310 5ustar aricaric00000000000000networkx-1.8.1/networkx/readwrite/tests/test_gexf.py0000664000175000017500000002473412177456333022662 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx as nx import io class TestGEXF(object): @classmethod def setupClass(cls): try: import xml.etree.ElementTree except ImportError: raise SkipTest('xml.etree.ElementTree not available.') def setUp(self): self.simple_directed_data=""" """ self.simple_directed_graph=nx.DiGraph() self.simple_directed_graph.add_node('0',label='Hello') self.simple_directed_graph.add_node('1',label='World') self.simple_directed_graph.add_edge('0','1',id='0') self.simple_directed_fh = \ io.BytesIO(self.simple_directed_data.encode('UTF-8')) self.attribute_data=""" Gephi.org A Web network true """ self.attribute_graph=nx.DiGraph() self.attribute_graph.graph['node_default']={'frog':True} self.attribute_graph.add_node('0', label='Gephi', url='http://gephi.org', indegree=1) self.attribute_graph.add_node('1', label='Webatlas', url='http://webatlas.fr', indegree=2) self.attribute_graph.add_node('2', label='RTGI', url='http://rtgi.fr', indegree=1) self.attribute_graph.add_node('3', label='BarabasiLab', url='http://barabasilab.com', indegree=1, frog=False) self.attribute_graph.add_edge('0','1',id='0') self.attribute_graph.add_edge('0','2',id='1') self.attribute_graph.add_edge('1','0',id='2') self.attribute_graph.add_edge('2','1',id='3') self.attribute_graph.add_edge('0','3',id='4') self.attribute_fh = io.BytesIO(self.attribute_data.encode('UTF-8')) self.simple_undirected_data=""" """ self.simple_undirected_graph=nx.Graph() self.simple_undirected_graph.add_node('0',label='Hello') self.simple_undirected_graph.add_node('1',label='World') self.simple_undirected_graph.add_edge('0','1',id='0') self.simple_undirected_fh = io.BytesIO(self.simple_undirected_data.encode('UTF-8')) def test_read_simple_directed_graphml(self): G=self.simple_directed_graph H=nx.read_gexf(self.simple_directed_fh) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal(sorted(G.edges()),sorted(H.edges())) assert_equal(sorted(G.edges(data=True)), sorted(H.edges(data=True))) self.simple_directed_fh.seek(0) def test_write_read_simple_directed_graphml(self): G=self.simple_directed_graph fh=io.BytesIO() nx.write_gexf(G,fh) fh.seek(0) H=nx.read_gexf(fh) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal(sorted(G.edges()),sorted(H.edges())) assert_equal(sorted(G.edges(data=True)), sorted(H.edges(data=True))) self.simple_directed_fh.seek(0) def test_read_simple_undirected_graphml(self): G=self.simple_undirected_graph H=nx.read_gexf(self.simple_undirected_fh) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal( sorted(sorted(e) for e in G.edges()), sorted(sorted(e) for e in H.edges())) self.simple_undirected_fh.seek(0) def test_read_attribute_graphml(self): G=self.attribute_graph H=nx.read_gexf(self.attribute_fh) assert_equal(sorted(G.nodes(True)),sorted(H.nodes(data=True))) 
ge=sorted(G.edges(data=True)) he=sorted(H.edges(data=True)) for a,b in zip(ge,he): assert_equal(a,b) self.attribute_fh.seek(0) def test_directed_edge_in_undirected(self): s=""" """ fh = io.BytesIO(s.encode('UTF-8')) assert_raises(nx.NetworkXError,nx.read_gexf,fh) def test_undirected_edge_in_directed(self): s=""" """ fh = io.BytesIO(s.encode('UTF-8')) assert_raises(nx.NetworkXError,nx.read_gexf,fh) def test_key_error(self): s=""" """ fh = io.BytesIO(s.encode('UTF-8')) assert_raises(nx.NetworkXError,nx.read_gexf,fh) def test_relabel(self): s=""" """ fh = io.BytesIO(s.encode('UTF-8')) G=nx.read_gexf(fh,relabel=True) assert_equal(sorted(G.nodes()),["Hello","Word"]) def test_default_attribute(self): G=nx.Graph() G.add_node(1,label='1',color='green') G.add_path([0,1,2,3]) G.add_edge(1,2,foo=3) G.graph['node_default']={'color':'yellow'} G.graph['edge_default']={'foo':7} fh = io.BytesIO() nx.write_gexf(G,fh) fh.seek(0) H=nx.read_gexf(fh,node_type=int) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal( sorted(sorted(e) for e in G.edges()), sorted(sorted(e) for e in H.edges())) assert_equal(G.graph,H.graph) def test_serialize_ints_to_strings(self): G=nx.Graph() G.add_node(1,id=7,label=77) fh = io.BytesIO() nx.write_gexf(G,fh) fh.seek(0) H=nx.read_gexf(fh,node_type=int) assert_equal(H.nodes(),[7]) assert_equal(H.node[7]['label'],'77') def test_write_with_node_attributes(self): # Addresses #673. G = nx.path_graph(4) for i in range(4): G.node[i]['id'] = i G.node[i]['label'] = i G.node[i]['pid'] = i expected = """ """ obtained = '\n'.join(nx.generate_gexf(G)) assert_equal( expected, obtained ) networkx-1.8.1/networkx/readwrite/tests/test_shp.py0000664000175000017500000001070112177456333022510 0ustar aricaric00000000000000"""Unit tests for shp. """ import os import tempfile from nose import SkipTest from nose.tools import assert_equal import networkx as nx class TestShp(object): @classmethod def setupClass(cls): global ogr try: from osgeo import ogr except ImportError: raise SkipTest('ogr not available.') def deletetmp(self, drv, *paths): for p in paths: if os.path.exists(p): drv.DeleteDataSource(p) def setUp(self): def createlayer(driver): lyr = shp.CreateLayer("edges", None, ogr.wkbLineString) namedef = ogr.FieldDefn("Name", ogr.OFTString) namedef.SetWidth(32) lyr.CreateField(namedef) return lyr drv = ogr.GetDriverByName("ESRI Shapefile") testdir = os.path.join(tempfile.gettempdir(), 'shpdir') shppath = os.path.join(tempfile.gettempdir(), 'tmpshp.shp') self.deletetmp(drv, testdir, shppath) os.mkdir(testdir) shp = drv.CreateDataSource(shppath) lyr = createlayer(shp) self.names = ['a', 'b', 'c'] # edgenames self.paths = ( [(1.0, 1.0), (2.0, 2.0)], [(2.0, 2.0), (3.0, 3.0)], [(0.9, 0.9), (4.0, 2.0)] ) for path, name in zip(self.paths, self.names): feat = ogr.Feature(lyr.GetLayerDefn()) g = ogr.Geometry(ogr.wkbLineString) map(lambda xy: g.AddPoint_2D(*xy), path) feat.SetGeometry(g) feat.SetField("Name", name) lyr.CreateFeature(feat) self.shppath = shppath self.testdir = testdir self.drv = drv def testload(self): expected = nx.DiGraph() map(expected.add_path, self.paths) G = nx.read_shp(self.shppath) assert_equal(sorted(expected.node), sorted(G.node)) assert_equal(sorted(expected.edges()), sorted(G.edges())) names = [G.get_edge_data(s, e)['Name'] for s, e in G.edges()] assert_equal(self.names, sorted(names)) def checkgeom(self, lyr, expected): feature = lyr.GetNextFeature() actualwkt = [] while feature: actualwkt.append(feature.GetGeometryRef().ExportToWkt()) feature = lyr.GetNextFeature() 
assert_equal(sorted(expected), sorted(actualwkt)) def test_geometryexport(self): expectedpoints = ( "POINT (1 1)", "POINT (2 2)", "POINT (3 3)", "POINT (0.9 0.9)", "POINT (4 2)" ) expectedlines = ( "LINESTRING (1 1,2 2)", "LINESTRING (2 2,3 3)", "LINESTRING (0.9 0.9,4 2)" ) tpath = os.path.join(tempfile.gettempdir(), 'shpdir') G = nx.read_shp(self.shppath) nx.write_shp(G, tpath) shpdir = ogr.Open(tpath) self.checkgeom(shpdir.GetLayerByName("nodes"), expectedpoints) self.checkgeom(shpdir.GetLayerByName("edges"), expectedlines) def test_attributeexport(self): def testattributes(lyr, graph): feature = lyr.GetNextFeature() while feature: coords = [] ref = feature.GetGeometryRef() for i in xrange(ref.GetPointCount()): coords.append(ref.GetPoint_2D(i)) name = feature.GetFieldAsString('Name') assert_equal(graph.get_edge_data(*coords)['Name'], name) feature = lyr.GetNextFeature() tpath = os.path.join(tempfile.gettempdir(), 'shpdir') G = nx.read_shp(self.shppath) nx.write_shp(G, tpath) shpdir = ogr.Open(tpath) edges = shpdir.GetLayerByName("edges") testattributes(edges, G) def test_wkt_export(self): G = nx.DiGraph() tpath = os.path.join(tempfile.gettempdir(), 'shpdir') points = ( "POINT (0.9 0.9)", "POINT (4 2)" ) line = ( "LINESTRING (0.9 0.9,4 2)", ) G.add_node(1, Wkt=points[0]) G.add_node(2, Wkt=points[1]) G.add_edge(1, 2, Wkt=line[0]) try: nx.write_shp(G, tpath) except Exception as e: assert False, e shpdir = ogr.Open(tpath) self.checkgeom(shpdir.GetLayerByName("nodes"), points) self.checkgeom(shpdir.GetLayerByName("edges"), line) def tearDown(self): self.deletetmp(self.drv, self.testdir, self.shppath) networkx-1.8.1/networkx/readwrite/tests/test_graphml.py0000664000175000017500000004200312177456333023350 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx as nx import io import tempfile import os class TestGraph(object): @classmethod def setupClass(cls): try: import xml.etree.ElementTree except ImportError: raise SkipTest('xml.etree.ElementTree not available.') def setUp(self): self.simple_directed_data=""" """ self.simple_directed_graph=nx.DiGraph() self.simple_directed_graph.add_node('n10') self.simple_directed_graph.add_edge('n0','n2',id='foo') self.simple_directed_graph.add_edges_from([('n1','n2'), ('n2','n3'), ('n3','n5'), ('n3','n4'), ('n4','n6'), ('n6','n5'), ('n5','n7'), ('n6','n8'), ('n8','n7'), ('n8','n9'), ]) self.simple_directed_fh = \ io.BytesIO(self.simple_directed_data.encode('UTF-8')) self.attribute_data=""" yellow green blue red turquoise 1.0 1.0 2.0 1.1 """ self.attribute_graph=nx.DiGraph(id='G') self.attribute_graph.graph['node_default']={'color':'yellow'} self.attribute_graph.add_node('n0',color='green') self.attribute_graph.add_node('n2',color='blue') self.attribute_graph.add_node('n3',color='red') self.attribute_graph.add_node('n4') self.attribute_graph.add_node('n5',color='turquoise') self.attribute_graph.add_edge('n0','n2',id='e0',weight=1.0) self.attribute_graph.add_edge('n0','n1',id='e1',weight=1.0) self.attribute_graph.add_edge('n1','n3',id='e2',weight=2.0) self.attribute_graph.add_edge('n3','n2',id='e3') self.attribute_graph.add_edge('n2','n4',id='e4') self.attribute_graph.add_edge('n3','n5',id='e5') self.attribute_graph.add_edge('n5','n4',id='e6',weight=1.1) self.attribute_fh = io.BytesIO(self.attribute_data.encode('UTF-8')) self.simple_undirected_data=""" """ # self.simple_undirected_graph=nx.Graph() self.simple_undirected_graph.add_node('n10') 
self.simple_undirected_graph.add_edge('n0','n2',id='foo') self.simple_undirected_graph.add_edges_from([('n1','n2'), ('n2','n3'), ]) self.simple_undirected_fh = io.BytesIO(self.simple_undirected_data.encode('UTF-8')) def test_read_simple_directed_graphml(self): G=self.simple_directed_graph H=nx.read_graphml(self.simple_directed_fh) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal(sorted(G.edges()),sorted(H.edges())) assert_equal(sorted(G.edges(data=True)), sorted(H.edges(data=True))) self.simple_directed_fh.seek(0) I=nx.parse_graphml(self.simple_directed_data) assert_equal(sorted(G.nodes()),sorted(I.nodes())) assert_equal(sorted(G.edges()),sorted(I.edges())) assert_equal(sorted(G.edges(data=True)), sorted(I.edges(data=True))) def test_write_read_simple_directed_graphml(self): G=self.simple_directed_graph fh=io.BytesIO() nx.write_graphml(G,fh) fh.seek(0) H=nx.read_graphml(fh) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal(sorted(G.edges()),sorted(H.edges())) assert_equal(sorted(G.edges(data=True)), sorted(H.edges(data=True))) self.simple_directed_fh.seek(0) def test_read_simple_undirected_graphml(self): G=self.simple_undirected_graph H=nx.read_graphml(self.simple_undirected_fh) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal( sorted(sorted(e) for e in G.edges()), sorted(sorted(e) for e in H.edges())) self.simple_undirected_fh.seek(0) I=nx.parse_graphml(self.simple_undirected_data) assert_equal(sorted(G.nodes()),sorted(I.nodes())) assert_equal( sorted(sorted(e) for e in G.edges()), sorted(sorted(e) for e in I.edges())) def test_read_attribute_graphml(self): G=self.attribute_graph H=nx.read_graphml(self.attribute_fh) assert_equal(sorted(G.nodes(True)),sorted(H.nodes(data=True))) ge=sorted(G.edges(data=True)) he=sorted(H.edges(data=True)) for a,b in zip(ge,he): assert_equal(a,b) self.attribute_fh.seek(0) I=nx.parse_graphml(self.attribute_data) assert_equal(sorted(G.nodes(True)),sorted(I.nodes(data=True))) ge=sorted(G.edges(data=True)) he=sorted(I.edges(data=True)) for a,b in zip(ge,he): assert_equal(a,b) def test_directed_edge_in_undirected(self): s=""" """ fh = io.BytesIO(s.encode('UTF-8')) assert_raises(nx.NetworkXError,nx.read_graphml,fh) assert_raises(nx.NetworkXError,nx.parse_graphml,s) def test_undirected_edge_in_directed(self): s=""" """ fh = io.BytesIO(s.encode('UTF-8')) assert_raises(nx.NetworkXError,nx.read_graphml,fh) assert_raises(nx.NetworkXError,nx.parse_graphml,s) def test_key_error(self): s=""" yellow green blue 1.0 """ fh = io.BytesIO(s.encode('UTF-8')) assert_raises(nx.NetworkXError,nx.read_graphml,fh) assert_raises(nx.NetworkXError,nx.parse_graphml,s) def test_hyperedge_error(self): s=""" yellow green blue """ fh = io.BytesIO(s.encode('UTF-8')) assert_raises(nx.NetworkXError,nx.read_graphml,fh) assert_raises(nx.NetworkXError,nx.parse_graphml,s) # remove test until we get the "name" issue sorted # https://networkx.lanl.gov/trac/ticket/544 def test_default_attribute(self): G=nx.Graph() G.add_node(1,label=1,color='green') G.add_path([0,1,2,3]) G.add_edge(1,2,weight=3) G.graph['node_default']={'color':'yellow'} G.graph['edge_default']={'weight':7} fh = io.BytesIO() nx.write_graphml(G,fh) fh.seek(0) H=nx.read_graphml(fh,node_type=int) assert_equal(sorted(G.nodes()),sorted(H.nodes())) assert_equal( sorted(sorted(e) for e in G.edges()), sorted(sorted(e) for e in H.edges())) assert_equal(G.graph,H.graph) def test_multigraph_keys(self): # test that multigraphs use edge id attributes as key pass def test_multigraph_to_graph(self): # test 
converting multigraph to graph if no parallel edges are found pass def test_yfiles_extension(self): data=""" 1 2 """ fh = io.BytesIO(data.encode('UTF-8')) G=nx.read_graphml(fh) assert_equal(G.edges(),[('n0','n1')]) assert_equal(G['n0']['n1']['id'],'e0') assert_equal(G.node['n0']['label'],'1') assert_equal(G.node['n1']['label'],'2') H=nx.parse_graphml(data) assert_equal(H.edges(),[('n0','n1')]) assert_equal(H['n0']['n1']['id'],'e0') assert_equal(H.node['n0']['label'],'1') assert_equal(H.node['n1']['label'],'2') def test_unicode(self): G = nx.Graph() try: # Python 3.x name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) node_type=str except ValueError: # Python 2.6+ name1 = unichr(2344) + unichr(123) + unichr(6543) name2 = unichr(5543) + unichr(1543) + unichr(324) node_type=unicode G.add_edge(name1, 'Radiohead', attr_dict={'foo': name2}) fd, fname = tempfile.mkstemp() nx.write_graphml(G, fname) H = nx.read_graphml(fname,node_type=node_type) assert_equal(G.adj, H.adj) os.close(fd) os.unlink(fname) def test_bool(self): s=""" false True False true false """ fh = io.BytesIO(s.encode('UTF-8')) G=nx.read_graphml(fh) assert_equal(G.node['n0']['test'],True) assert_equal(G.node['n2']['test'],False) H=nx.parse_graphml(s) assert_equal(H.node['n0']['test'],True) assert_equal(H.node['n2']['test'],False) networkx-1.8.1/networkx/readwrite/tests/test_pajek.py0000664000175000017500000000511112177456333023007 0ustar aricaric00000000000000#!/usr/bin/env python """ Pajek tests """ from nose.tools import assert_equal from networkx import * import os,tempfile from io import open from networkx.testing import * class TestPajek(object): def setUp(self): self.data="""*network Tralala\n*vertices 4\n 1 "A1" 0.0938 0.0896 ellipse x_fact 1 y_fact 1\n 2 "Bb" 0.8188 0.2458 ellipse x_fact 1 y_fact 1\n 3 "C" 0.3688 0.7792 ellipse x_fact 1\n 4 "D2" 0.9583 0.8563 ellipse x_fact 1\n*arcs\n1 1 1 h2 0 w 3 c Blue s 3 a1 -130 k1 0.6 a2 -130 k2 0.6 ap 0.5 l "Bezier loop" lc BlueViolet fos 20 lr 58 lp 0.3 la 360\n2 1 1 h2 0 a1 120 k1 1.3 a2 -120 k2 0.3 ap 25 l "Bezier arc" lphi 270 la 180 lr 19 lp 0.5\n1 2 1 h2 0 a1 40 k1 2.8 a2 30 k2 0.8 ap 25 l "Bezier arc" lphi 90 la 0 lp 0.65\n4 2 -1 h2 0 w 1 k1 -2 k2 250 ap 25 l "Circular arc" c Red lc OrangeRed\n3 4 1 p Dashed h2 0 w 2 c OliveGreen ap 25 l "Straight arc" lc PineGreen\n1 3 1 p Dashed h2 0 w 5 k1 -1 k2 -20 ap 25 l "Oval arc" c Brown lc Black\n3 3 -1 h1 6 w 1 h2 12 k1 -2 k2 -15 ap 0.5 l "Circular loop" c Red lc OrangeRed lphi 270 la 180""" self.G=nx.MultiDiGraph() self.G.add_nodes_from(['A1', 'Bb', 'C', 'D2']) self.G.add_edges_from([('A1', 'A1'), ('A1', 'Bb'), ('A1', 'C'), ('Bb', 'A1'),('C', 'C'), ('C', 'D2'), ('D2', 'Bb')]) self.G.graph['name']='Tralala' (self.fd,self.fname)=tempfile.mkstemp() fh=open(self.fname,'wb') fh.write(self.data.encode('UTF-8')) fh.close() def tearDown(self): os.close(self.fd) os.unlink(self.fname) def test_parse_pajek_simple(self): # Example without node positions or shape data="""*Vertices 2\n1 "1"\n2 "2"\n*Edges\n1 2\n2 1""" G=parse_pajek(data) assert_equal(sorted(G.nodes()), ['1', '2']) assert_edges_equal(G.edges(), [('1', '2'), ('1', '2')]) def test_parse_pajek(self): G=parse_pajek(self.data) assert_equal(sorted(G.nodes()), ['A1', 'Bb', 'C', 'D2']) assert_edges_equal(G.edges(), [('A1', 'A1'), ('A1', 'Bb'), ('A1', 'C'), ('Bb', 'A1'), ('C', 'C'), ('C', 'D2'), ('D2', 'Bb')]) def test_read_pajek(self): G=parse_pajek(self.data) Gin=read_pajek(self.fname) assert_equal(sorted(G.nodes()), sorted(Gin.nodes())) 
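# Format note (illustrative): the Pajek literals above are section-based --
# an optional "*network <name>" line, a "*vertices N" section with one
# 'id "label" [x y shape ...]' line per node, and an "*arcs" (directed) or
# "*Edges" (undirected) section whose lines begin with "source target
# [weight]" and may carry further drawing parameters.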
assert_edges_equal(G.edges(), Gin.edges()) assert_equal(self.G.graph,Gin.graph) for n in G.node: assert_equal(G.node[n],Gin.node[n]) networkx-1.8.1/networkx/readwrite/tests/test_sparsegraph6.py0000664000175000017500000000556212177456333024334 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx import os,tempfile class TestGraph6(object): def test_parse_graph6(self): data="""DF{""" G=nx.parse_graph6(data) assert_equal(sorted(G.nodes()),[0, 1, 2, 3, 4]) assert_equal([e for e in sorted(G.edges())], [(0, 3), (0, 4), (1, 3), (1, 4), (2, 3), (2, 4), (3, 4)]) def test_read_graph6(self): data="""DF{""" G=nx.parse_graph6(data) (fd,fname)=tempfile.mkstemp() fh=open(fname,'w') b=fh.write(data) fh.close() Gin=nx.read_graph6(fname) assert_equal(sorted(G.nodes()),sorted(Gin.nodes())) assert_equal(sorted(G.edges()),sorted(Gin.edges())) os.close(fd) os.unlink(fname) def test_read_many_graph6(self): # Read many graphs into list data="""DF{\nD`{\nDqK\nD~{\n""" (fd,fname)=tempfile.mkstemp() fh=open(fname,'w') b=fh.write(data) fh.close() glist=nx.read_graph6_list(fname) assert_equal(len(glist),4) for G in glist: assert_equal(sorted(G.nodes()),[0, 1, 2, 3, 4]) os.close(fd) os.unlink(fname) class TestSparseGraph6(object): def test_parse_sparse6(self): data=""":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM""" G=nx.parse_sparse6(data) assert_equal(sorted(G.nodes()), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]) assert_equal([e for e in sorted(G.edges())], [(0, 1), (0, 2), (0, 3), (1, 12), (1, 14), (2, 13), (2, 15), (3, 16), (3, 17), (4, 7), (4, 9), (4, 11), (5, 6), (5, 8), (5, 9), (6, 10), (6, 11), (7, 8), (7, 10), (8, 12), (9, 15), (10, 14), (11, 13), (12, 16), (13, 17), (14, 17), (15, 16)]) def test_read_sparse6(self): data=""":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM""" G=nx.parse_sparse6(data) (fd,fname)=tempfile.mkstemp() fh=open(fname,'w') b=fh.write(data) fh.close() Gin=nx.read_sparse6(fname) assert_equal(sorted(G.nodes()),sorted(Gin.nodes())) assert_equal(sorted(G.edges()),sorted(Gin.edges())) os.close(fd) os.unlink(fname) def test_read_many_graph6(self): # Read many graphs into list data=""":Q___eDcdFcDeFcE`GaJ`IaHbKNbLM\n:Q___dCfDEdcEgcbEGbFIaJ`JaHN`IM""" (fd,fname)=tempfile.mkstemp() fh=open(fname,'w') b=fh.write(data) fh.close() glist=nx.read_sparse6_list(fname) assert_equal(len(glist),2) for G in glist: assert_equal(sorted(G.nodes()), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17]) os.close(fd) os.unlink(fname) networkx-1.8.1/networkx/readwrite/tests/test_p2g.py0000664000175000017500000000246012177456333022411 0ustar aricaric00000000000000from nose.tools import assert_equal, assert_raises, assert_not_equal import networkx as nx import io import tempfile import os from networkx.readwrite.p2g import * from networkx.testing import * class TestP2G: def setUp(self): self.G=nx.Graph(name="test") e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')] self.G.add_edges_from(e) self.G.add_node('g') self.DG=nx.DiGraph(self.G) def test_read_p2g(self): s = b"""\ name 3 4 a 1 2 b c 0 2 """ bytesIO = io.BytesIO(s) G = read_p2g(bytesIO) assert_equal(G.name,'name') assert_equal(sorted(G),['a','b','c']) edges = [(str(u),str(v)) for u,v in G.edges()] assert_edges_equal(G.edges(),[('a','c'),('a','b'),('c','a'),('c','c')]) def test_write_p2g(self): s=b"""foo 3 2 1 1 2 2 3 """ fh=io.BytesIO() G=nx.DiGraph() G.name='foo' G.add_edges_from([(1,2),(2,3)]) write_p2g(G,fh) fh.seek(0) r=fh.read() assert_equal(r,s) def test_write_read_p2g(self): fh=io.BytesIO() 
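# Format note (illustrative): the p2g byte strings in the tests above encode a
# graph name line, a "<number of nodes> <number of edges>" line, and then, for
# each node, a label line followed by a line of 0-based indices of its
# successors (an empty index line means no out-edges); node "a" (index 0)
# lists "1 2", which is why test_read_p2g expects the edges ('a','b') and
# ('a','c').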
G=nx.DiGraph() G.name='foo' G.add_edges_from([('a','b'),('b','c')]) write_p2g(G,fh) fh.seek(0) H=read_p2g(fh) assert_edges_equal(G.edges(),H.edges()) networkx-1.8.1/networkx/readwrite/tests/test_edgelist.py0000664000175000017500000001762012177456333023525 0ustar aricaric00000000000000""" Unit tests for edgelists. """ from nose.tools import assert_equal, assert_raises, assert_not_equal import networkx as nx import io import tempfile import os def assert_equal_edges(elist1,elist2): if len(elist1[0]) == 2: return assert_equal(sorted(sorted(e) for e in elist1), sorted(sorted(e) for e in elist2)) else: return assert_equal(sorted((sorted((u, v)), d) for u, v, d in elist1), sorted((sorted((u, v)), d) for u, v, d in elist2)) class TestEdgelist: def setUp(self): self.G=nx.Graph(name="test") e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')] self.G.add_edges_from(e) self.G.add_node('g') self.DG=nx.DiGraph(self.G) self.XG=nx.MultiGraph() self.XG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)]) self. XDG=nx.MultiDiGraph(self.XG) def test_read_edgelist_1(self): s = b"""\ # comment line 1 2 # comment line 2 3 """ bytesIO = io.BytesIO(s) G = nx.read_edgelist(bytesIO,nodetype=int) assert_equal_edges(G.edges(),[(1,2),(2,3)]) def test_read_edgelist_2(self): s = b"""\ # comment line 1 2 2.0 # comment line 2 3 3.0 """ bytesIO = io.BytesIO(s) G = nx.read_edgelist(bytesIO,nodetype=int,data=False) assert_equal_edges(G.edges(),[(1,2),(2,3)]) bytesIO = io.BytesIO(s) G = nx.read_weighted_edgelist(bytesIO,nodetype=int) assert_equal_edges(G.edges(data=True),[(1,2,{'weight':2.0}),(2,3,{'weight':3.0})]) def test_read_edgelist_3(self): s = b"""\ # comment line 1 2 {'weight':2.0} # comment line 2 3 {'weight':3.0} """ bytesIO = io.BytesIO(s) G = nx.read_edgelist(bytesIO,nodetype=int,data=False) assert_equal_edges(G.edges(),[(1,2),(2,3)]) bytesIO = io.BytesIO(s) G = nx.read_edgelist(bytesIO,nodetype=int,data=True) assert_equal_edges(G.edges(data=True),[(1,2,{'weight':2.0}),(2,3,{'weight':3.0})]) def test_write_edgelist_1(self): fh=io.BytesIO() G=nx.Graph() G.add_edges_from([(1,2),(2,3)]) nx.write_edgelist(G,fh,data=False) fh.seek(0) assert_equal(fh.read(),b"1 2\n2 3\n") def test_write_edgelist_2(self): fh=io.BytesIO() G=nx.Graph() G.add_edges_from([(1,2),(2,3)]) nx.write_edgelist(G,fh,data=True) fh.seek(0) assert_equal(fh.read(),b"1 2 {}\n2 3 {}\n") def test_write_edgelist_3(self): fh=io.BytesIO() G=nx.Graph() G.add_edge(1,2,weight=2.0) G.add_edge(2,3,weight=3.0) nx.write_edgelist(G,fh,data=True) fh.seek(0) assert_equal(fh.read(),b"1 2 {'weight': 2.0}\n2 3 {'weight': 3.0}\n") def test_write_edgelist_4(self): fh=io.BytesIO() G=nx.Graph() G.add_edge(1,2,weight=2.0) G.add_edge(2,3,weight=3.0) nx.write_edgelist(G,fh,data=[('weight')]) fh.seek(0) assert_equal(fh.read(),b"1 2 2.0\n2 3 3.0\n") def test_unicode(self): G = nx.Graph() try: # Python 3.x name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) except ValueError: # Python 2.6+ name1 = unichr(2344) + unichr(123) + unichr(6543) name2 = unichr(5543) + unichr(1543) + unichr(324) G.add_edge(name1, 'Radiohead', attr_dict={name2: 3}) fd, fname = tempfile.mkstemp() nx.write_edgelist(G, fname) H = nx.read_edgelist(fname) assert_equal(G.adj, H.adj) os.close(fd) os.unlink(fname) def test_latin1_error(self): G = nx.Graph() try: # Python 3.x name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) except ValueError: # Python 2.6+ name1 = unichr(2344) + unichr(123) + unichr(6543) name2 = unichr(5543) + 
unichr(1543) + unichr(324) G.add_edge(name1, 'Radiohead', attr_dict={name2: 3}) fd, fname = tempfile.mkstemp() assert_raises(UnicodeEncodeError, nx.write_edgelist, G, fname, encoding = 'latin-1') os.close(fd) os.unlink(fname) def test_latin1(self): G = nx.Graph() try: # Python 3.x blurb = chr(1245) # just to trigger the exception name1 = 'Bj' + chr(246) + 'rk' name2 = chr(220) + 'ber' except ValueError: # Python 2.6+ name1 = 'Bj' + unichr(246) + 'rk' name2 = unichr(220) + 'ber' G.add_edge(name1, 'Radiohead', attr_dict={name2: 3}) fd, fname = tempfile.mkstemp() nx.write_edgelist(G, fname, encoding = 'latin-1') H = nx.read_edgelist(fname, encoding = 'latin-1') assert_equal(G.adj, H.adj) os.close(fd) os.unlink(fname) def test_edgelist_graph(self): G=self.G (fd,fname)=tempfile.mkstemp() nx.write_edgelist(G,fname) H=nx.read_edgelist(fname) H2=nx.read_edgelist(fname) assert_not_equal(H,H2) # they should be different graphs G.remove_node('g') # isolated nodes are not written in edgelist assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_edgelist_digraph(self): G=self.DG (fd,fname)=tempfile.mkstemp() nx.write_edgelist(G,fname) H=nx.read_edgelist(fname,create_using=nx.DiGraph()) H2=nx.read_edgelist(fname,create_using=nx.DiGraph()) assert_not_equal(H,H2) # they should be different graphs G.remove_node('g') # isolated nodes are not written in edgelist assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_edgelist_integers(self): G=nx.convert_node_labels_to_integers(self.G) (fd,fname)=tempfile.mkstemp() nx.write_edgelist(G,fname) H=nx.read_edgelist(fname,nodetype=int) # isolated nodes are not written in edgelist G.remove_nodes_from(nx.isolates(G)) assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_edgelist_digraph(self): G=self.DG (fd,fname)=tempfile.mkstemp() nx.write_edgelist(G,fname) H=nx.read_edgelist(fname,create_using=nx.DiGraph()) G.remove_node('g') # isolated nodes are not written in edgelist H2=nx.read_edgelist(fname,create_using=nx.DiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_edgelist_multigraph(self): G=self.XG (fd,fname)=tempfile.mkstemp() nx.write_edgelist(G,fname) H=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiGraph()) H2=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_edgelist_multidigraph(self): G=self.XDG (fd,fname)=tempfile.mkstemp() nx.write_edgelist(G,fname) H=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiDiGraph()) H2=nx.read_edgelist(fname,nodetype=int,create_using=nx.MultiDiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) networkx-1.8.1/networkx/readwrite/tests/test_yaml.py0000664000175000017500000000241012177456333022656 0ustar aricaric00000000000000""" Unit tests for yaml. 
""" import os,tempfile from nose import SkipTest from nose.tools import assert_equal import networkx as nx class TestYaml(object): @classmethod def setupClass(cls): global yaml try: import yaml except ImportError: raise SkipTest('yaml not available.') def setUp(self): self.build_graphs() def build_graphs(self): self.G = nx.Graph(name="test") e = [('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')] self.G.add_edges_from(e) self.G.add_node('g') self.DG = nx.DiGraph(self.G) self.MG = nx.MultiGraph() self.MG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)]) def assert_equal(self, G, data=False): (fd, fname) = tempfile.mkstemp() nx.write_yaml(G, fname) Gin = nx.read_yaml(fname); assert_equal(sorted(G.nodes()),sorted(Gin.nodes())) assert_equal(G.edges(data=data),Gin.edges(data=data)) os.close(fd) os.unlink(fname) def testUndirected(self): self.assert_equal(self.G, False) def testDirected(self): self.assert_equal(self.DG, False) def testMultiGraph(self): self.assert_equal(self.MG, True) networkx-1.8.1/networkx/readwrite/tests/test_adjlist.py0000664000175000017500000002367312177456333023364 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Unit tests for adjlist. """ import io from nose.tools import assert_equal, assert_raises, assert_not_equal import os import tempfile import networkx as nx from networkx.testing import * class TestAdjlist(): def setUp(self): self.G=nx.Graph(name="test") e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')] self.G.add_edges_from(e) self.G.add_node('g') self.DG=nx.DiGraph(self.G) self.XG=nx.MultiGraph() self.XG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)]) self. XDG=nx.MultiDiGraph(self.XG) def test_read_multiline_adjlist_1(self): # Unit test for https://networkx.lanl.gov/trac/ticket/252 s = b"""# comment line 1 2 # comment line 2 3 """ bytesIO = io.BytesIO(s) G = nx.read_multiline_adjlist(bytesIO) adj = {'1': {'3': {}, '2': {}}, '3': {'1': {}}, '2': {'1': {}}} assert_equal(G.adj, adj) def test_unicode(self): G = nx.Graph() try: # Python 3.x name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) except ValueError: # Python 2.6+ name1 = unichr(2344) + unichr(123) + unichr(6543) name2 = unichr(5543) + unichr(1543) + unichr(324) G.add_edge(name1, 'Radiohead', {name2: 3}) fd, fname = tempfile.mkstemp() nx.write_multiline_adjlist(G, fname) H = nx.read_multiline_adjlist(fname) assert_equal(G.adj, H.adj) os.close(fd) os.unlink(fname) def test_latin1_error(self): G = nx.Graph() try: # Python 3.x name1 = chr(2344) + chr(123) + chr(6543) name2 = chr(5543) + chr(1543) + chr(324) except ValueError: # Python 2.6+ name1 = unichr(2344) + unichr(123) + unichr(6543) name2 = unichr(5543) + unichr(1543) + unichr(324) G.add_edge(name1, 'Radiohead', {name2: 3}) fd, fname = tempfile.mkstemp() assert_raises(UnicodeEncodeError, nx.write_multiline_adjlist, G, fname, encoding = 'latin-1') os.close(fd) os.unlink(fname) def test_latin1(self): G = nx.Graph() try: # Python 3.x blurb = chr(1245) # just to trigger the exception name1 = 'Bj' + chr(246) + 'rk' name2 = chr(220) + 'ber' except ValueError: # Python 2.6+ name1 = 'Bj' + unichr(246) + 'rk' name2 = unichr(220) + 'ber' G.add_edge(name1, 'Radiohead', {name2: 3}) fd, fname = tempfile.mkstemp() nx.write_multiline_adjlist(G, fname, encoding = 'latin-1') H = nx.read_multiline_adjlist(fname, encoding = 'latin-1') assert_equal(G.adj, H.adj) os.close(fd) os.unlink(fname) def test_adjlist_graph(self): G=self.G (fd,fname)=tempfile.mkstemp() nx.write_adjlist(G,fname) 
H=nx.read_adjlist(fname) H2=nx.read_adjlist(fname) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_adjlist_digraph(self): G=self.DG (fd,fname)=tempfile.mkstemp() nx.write_adjlist(G,fname) H=nx.read_adjlist(fname,create_using=nx.DiGraph()) H2=nx.read_adjlist(fname,create_using=nx.DiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_adjlist_integers(self): (fd,fname)=tempfile.mkstemp() G=nx.convert_node_labels_to_integers(self.G) nx.write_adjlist(G,fname) H=nx.read_adjlist(fname,nodetype=int) H2=nx.read_adjlist(fname,nodetype=int) assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_adjlist_digraph(self): G=self.DG (fd,fname)=tempfile.mkstemp() nx.write_adjlist(G,fname) H=nx.read_adjlist(fname,create_using=nx.DiGraph()) H2=nx.read_adjlist(fname,create_using=nx.DiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_adjlist_multigraph(self): G=self.XG (fd,fname)=tempfile.mkstemp() nx.write_adjlist(G,fname) H=nx.read_adjlist(fname,nodetype=int, create_using=nx.MultiGraph()) H2=nx.read_adjlist(fname,nodetype=int, create_using=nx.MultiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_adjlist_multidigraph(self): G=self.XDG (fd,fname)=tempfile.mkstemp() nx.write_adjlist(G,fname) H=nx.read_adjlist(fname,nodetype=int, create_using=nx.MultiDiGraph()) H2=nx.read_adjlist(fname,nodetype=int, create_using=nx.MultiDiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_adjlist_delimiter(self): fh=io.BytesIO() G = nx.path_graph(3) nx.write_adjlist(G, fh, delimiter=':') fh.seek(0) H = nx.read_adjlist(fh, nodetype=int, delimiter=':') assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) class TestMultilineAdjlist(): def setUp(self): self.G=nx.Graph(name="test") e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')] self.G.add_edges_from(e) self.G.add_node('g') self.DG=nx.DiGraph(self.G) self.DG.remove_edge('b','a') self.DG.remove_edge('b','c') self.XG=nx.MultiGraph() self.XG.add_weighted_edges_from([(1,2,5),(1,2,5),(1,2,1),(3,3,42)]) self. 
XDG=nx.MultiDiGraph(self.XG) def test_multiline_adjlist_graph(self): G=self.G (fd,fname)=tempfile.mkstemp() nx.write_multiline_adjlist(G,fname) H=nx.read_multiline_adjlist(fname) H2=nx.read_multiline_adjlist(fname) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_multiline_adjlist_digraph(self): G=self.DG (fd,fname)=tempfile.mkstemp() nx.write_multiline_adjlist(G,fname) H=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph()) H2=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_multiline_adjlist_integers(self): (fd,fname)=tempfile.mkstemp() G=nx.convert_node_labels_to_integers(self.G) nx.write_multiline_adjlist(G,fname) H=nx.read_multiline_adjlist(fname,nodetype=int) H2=nx.read_multiline_adjlist(fname,nodetype=int) assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_multiline_adjlist_digraph(self): G=self.DG (fd,fname)=tempfile.mkstemp() nx.write_multiline_adjlist(G,fname) H=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph()) H2=nx.read_multiline_adjlist(fname,create_using=nx.DiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_edges_equal(H.edges(),G.edges()) os.close(fd) os.unlink(fname) def test_multiline_adjlist_multigraph(self): G=self.XG (fd,fname)=tempfile.mkstemp() nx.write_multiline_adjlist(G,fname) H=nx.read_multiline_adjlist(fname,nodetype=int, create_using=nx.MultiGraph()) H2=nx.read_multiline_adjlist(fname,nodetype=int, create_using=nx.MultiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_multiline_adjlist_multidigraph(self): G=self.XDG (fd,fname)=tempfile.mkstemp() nx.write_multiline_adjlist(G,fname) H=nx.read_multiline_adjlist(fname,nodetype=int, create_using=nx.MultiDiGraph()) H2=nx.read_multiline_adjlist(fname,nodetype=int, create_using=nx.MultiDiGraph()) assert_not_equal(H,H2) # they should be different graphs assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) os.close(fd) os.unlink(fname) def test_multiline_adjlist_delimiter(self): fh=io.BytesIO() G = nx.path_graph(3) nx.write_multiline_adjlist(G, fh, delimiter=':') fh.seek(0) H = nx.read_multiline_adjlist(fh, nodetype=int, delimiter=':') assert_equal(sorted(H.nodes()),sorted(G.nodes())) assert_equal(sorted(H.edges()),sorted(G.edges())) networkx-1.8.1/networkx/readwrite/tests/test_leda.py0000664000175000017500000000312612177456333022626 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx import os,tempfile class TestLEDA(object): def test_parse_leda(self): data="""#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" G=nx.parse_leda(data) G=nx.parse_leda(data.split('\n')) assert_equal(sorted(G.nodes()), ['v1', 'v2', 'v3', 'v4', 'v5']) assert_equal([e for e in 
sorted(G.edges(data=True))], [('v1', 'v2', {'label': '4'}), ('v1', 'v3', {'label': '3'}), ('v2', 'v3', {'label': '2'}), ('v3', 'v4', {'label': '3'}), ('v3', 'v5', {'label': '7'}), ('v4', 'v5', {'label': '6'}), ('v5', 'v1', {'label': 'foo'})]) def test_read_LEDA(self): data="""#header section \nLEDA.GRAPH \nstring\nint\n-1\n#nodes section\n5 \n|{v1}| \n|{v2}| \n|{v3}| \n|{v4}| \n|{v5}| \n\n#edges section\n7 \n1 2 0 |{4}| \n1 3 0 |{3}| \n2 3 0 |{2}| \n3 4 0 |{3}| \n3 5 0 |{7}| \n4 5 0 |{6}| \n5 1 0 |{foo}|""" G=nx.parse_leda(data) (fd,fname)=tempfile.mkstemp() fh=open(fname,'w') b=fh.write(data) fh.close() Gin=nx.read_leda(fname) assert_equal(sorted(G.nodes()),sorted(Gin.nodes())) assert_equal(sorted(G.edges()),sorted(Gin.edges())) os.close(fd) os.unlink(fname) networkx-1.8.1/networkx/readwrite/tests/test_gpickle.py0000664000175000017500000000146012177456333023336 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import assert_equal import networkx as nx import os,tempfile class TestGpickle(object): def setUp(self): G=nx.Graph(name="test") e=[('a','b'),('b','c'),('c','d'),('d','e'),('e','f'),('a','f')] G.add_edges_from(e,width=10) G.add_node('g',color='green') G.graph['number']=1 self.G=G def test_gpickle(self): G=self.G (fd,fname)=tempfile.mkstemp() nx.write_gpickle(G,fname); Gin=nx.read_gpickle(fname); assert_equal(sorted(G.nodes(data=True)), sorted(Gin.nodes(data=True))) assert_equal(sorted(G.edges(data=True)), sorted(Gin.edges(data=True))) assert_equal(G.graph,Gin.graph) os.close(fd) os.unlink(fname) networkx-1.8.1/networkx/readwrite/tests/test_gml.py0000664000175000017500000000603312177456333022500 0ustar aricaric00000000000000#!/usr/bin/env python import io from nose.tools import * from nose import SkipTest import networkx class TestGraph(object): @classmethod def setupClass(cls): global pyparsing try: import pyparsing except ImportError: try: import matplotlib.pyparsing as pyparsing except: raise SkipTest('gml test: pyparsing not available.') def setUp(self): self.simple_data="""Creator me graph [ comment "This is a sample graph" directed 1 IsPlanar 1 pos [ x 0 y 1 ] node [ id 1 label "Node 1" pos [ x 1 y 1 ] ] node [ id 2 pos [ x 1 y 2 ] label "Node 2" ] node [ id 3 label "Node 3" pos [ x 1 y 3 ] ] edge [ source 1 target 2 label "Edge from node 1 to node 2" color [line "blue" thickness 3] ] edge [ source 2 target 3 label "Edge from node 2 to node 3" ] edge [ source 3 target 1 label "Edge from node 3 to node 1" ] ] """ def test_parse_gml(self): G=networkx.parse_gml(self.simple_data,relabel=True) assert_equals(sorted(G.nodes()),\ ['Node 1', 'Node 2', 'Node 3']) assert_equals( [e for e in sorted(G.edges())],\ [('Node 1', 'Node 2'), ('Node 2', 'Node 3'), ('Node 3', 'Node 1')]) assert_equals( [e for e in sorted(G.edges(data=True))],\ [('Node 1', 'Node 2', {'color': {'line': 'blue', 'thickness': 3}, 'label': 'Edge from node 1 to node 2'}), ('Node 2', 'Node 3', {'label': 'Edge from node 2 to node 3'}), ('Node 3', 'Node 1', {'label': 'Edge from node 3 to node 1'})]) def test_read_gml(self): import os,tempfile (fd,fname)=tempfile.mkstemp() fh=open(fname,'w') fh.write(self.simple_data) fh.close() Gin=networkx.read_gml(fname,relabel=True) G=networkx.parse_gml(self.simple_data,relabel=True) assert_equals( sorted(G.nodes(data=True)), sorted(Gin.nodes(data=True))) assert_equals( sorted(G.edges(data=True)), sorted(Gin.edges(data=True))) os.close(fd) os.unlink(fname) def test_relabel_duplicate(self): data=""" graph [ label "" directed 1 node [ id 0 label "same" ] node [ id 1 label 
"same" ] ] """ fh = io.BytesIO(data.encode('UTF-8')) fh.seek(0) assert_raises(networkx.NetworkXError,networkx.read_gml,fh,relabel=True) def test_bool(self): G=networkx.Graph() G.add_node(1,on=True) G.add_edge(1,2,on=False) data = '\n'.join(list(networkx.generate_gml(G))) answer ="""graph [ node [ id 0 label 1 on 1 ] node [ id 1 label 2 ] edge [ source 0 target 1 on 0 ] ]""" assert_equal(data,answer) networkx-1.8.1/networkx/readwrite/nx_shp.py0000664000175000017500000001735112177456333021024 0ustar aricaric00000000000000""" ********* Shapefile ********* Generates a networkx.DiGraph from point and line shapefiles. "The Esri Shapefile or simply a shapefile is a popular geospatial vector data format for geographic information systems software. It is developed and regulated by Esri as a (mostly) open specification for data interoperability among Esri and other software products." See http://en.wikipedia.org/wiki/Shapefile for additional information. """ # Copyright (C) 2004-2010 by # Ben Reilly # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = """Ben Reilly (benwreilly@gmail.com)""" __all__ = ['read_shp', 'write_shp'] def read_shp(path): """Generates a networkx.DiGraph from shapefiles. Point geometries are translated into nodes, lines into edges. Coordinate tuples are used as keys. Attributes are preserved, line geometries are simplified into start and end coordinates. Accepts a single shapefile or directory of many shapefiles. "The Esri Shapefile or simply a shapefile is a popular geospatial vector data format for geographic information systems software [1]_." Parameters ---------- path : file or string File, directory, or filename to read. Returns ------- G : NetworkX graph Examples -------- >>> G=nx.read_shp('test.shp') # doctest: +SKIP References ---------- .. [1] http://en.wikipedia.org/wiki/Shapefile """ try: from osgeo import ogr except ImportError: raise ImportError("read_shp requires OGR: http://www.gdal.org/") net = nx.DiGraph() def getfieldinfo(lyr, feature, flds): f = feature return [f.GetField(f.GetFieldIndex(x)) for x in flds] def addlyr(lyr, fields): for findex in xrange(lyr.GetFeatureCount()): f = lyr.GetFeature(findex) flddata = getfieldinfo(lyr, f, fields) g = f.geometry() attributes = dict(zip(fields, flddata)) attributes["ShpName"] = lyr.GetName() if g.GetGeometryType() == 1: # point net.add_node((g.GetPoint_2D(0)), attributes) if g.GetGeometryType() == 2: # linestring attributes["Wkb"] = g.ExportToWkb() attributes["Wkt"] = g.ExportToWkt() attributes["Json"] = g.ExportToJson() last = g.GetPointCount() - 1 net.add_edge(g.GetPoint_2D(0), g.GetPoint_2D(last), attributes) if isinstance(path, str): shp = ogr.Open(path) lyrcount = shp.GetLayerCount() # multiple layers indicate a directory for lyrindex in xrange(lyrcount): lyr = shp.GetLayerByIndex(lyrindex) flds = [x.GetName() for x in lyr.schema] addlyr(lyr, flds) return net def write_shp(G, outdir): """Writes a networkx.DiGraph to two shapefiles, edges and nodes. Nodes and edges are expected to have a Well Known Binary (Wkb) or Well Known Text (Wkt) key in order to generate geometries. Also acceptable are nodes with a numeric tuple key (x,y). "The Esri Shapefile or simply a shapefile is a popular geospatial vector data format for geographic information systems software [1]_." Parameters ---------- outdir : directory path Output directory for the two shapefiles. 
Returns ------- None Examples -------- nx.write_shp(digraph, '/shapefiles') # doctest +SKIP References ---------- .. [1] http://en.wikipedia.org/wiki/Shapefile """ try: from osgeo import ogr except ImportError: raise ImportError("write_shp requires OGR: http://www.gdal.org/") # easier to debug in python if ogr throws exceptions ogr.UseExceptions() def netgeometry(key, data): if 'Wkb' in data: geom = ogr.CreateGeometryFromWkb(data['Wkb']) elif 'Wkt' in data: geom = ogr.CreateGeometryFromWkt(data['Wkt']) elif type(key[0]).__name__ == 'tuple': # edge keys are packed tuples geom = ogr.Geometry(ogr.wkbLineString) _from, _to = key[0], key[1] try: geom.SetPoint(0, *_from) geom.SetPoint(1, *_to) except TypeError: # assume user used tuple of int and choked ogr _ffrom = [float(x) for x in _from] _fto = [float(x) for x in _to] geom.SetPoint(0, *_ffrom) geom.SetPoint(1, *_fto) else: geom = ogr.Geometry(ogr.wkbPoint) try: geom.SetPoint(0, *key) except TypeError: # assume user used tuple of int and choked ogr fkey = [float(x) for x in key] geom.SetPoint(0, *fkey) return geom # Create_feature with new optional attributes arg (should be dict type) def create_feature(geometry, lyr, attributes=None): feature = ogr.Feature(lyr.GetLayerDefn()) feature.SetGeometry(g) if attributes != None: # Loop through attributes, assigning data to each field for field, data in attributes.iteritems(): feature.SetField(field, data) lyr.CreateFeature(feature) feature.Destroy() drv = ogr.GetDriverByName("ESRI Shapefile") shpdir = drv.CreateDataSource(outdir) # delete pre-existing output first otherwise ogr chokes try: shpdir.DeleteLayer("nodes") except: pass nodes = shpdir.CreateLayer("nodes", None, ogr.wkbPoint) for n in G: data = G.node[n] or {} g = netgeometry(n, data) create_feature(g, nodes) try: shpdir.DeleteLayer("edges") except: pass edges = shpdir.CreateLayer("edges", None, ogr.wkbLineString) # New edge attribute write support merged into edge loop fields = {} # storage for field names and their data types attributes = {} # storage for attribute data (indexed by field names) # Conversion dict between python and ogr types OGRTypes = {int: ogr.OFTInteger, str: ogr.OFTString, float: ogr.OFTReal} # Edge loop for e in G.edges(data=True): data = G.get_edge_data(*e) g = netgeometry(e, data) # Loop through attribute data in edges for key, data in e[2].iteritems(): # Reject spatial data not required for attribute table if (key != 'Json' and key != 'Wkt' and key != 'Wkb' and key != 'ShpName'): # For all edges check/add field and data type to fields dict if key not in fields: # Field not in previous edges so add to dict if type(data) in OGRTypes: fields[key] = OGRTypes[type(data)] else: # Data type not supported, default to string (char 80) fields[key] = ogr.OFTString # Create the new field newfield = ogr.FieldDefn(key, fields[key]) edges.CreateField(newfield) # Store the data from new field to dict for CreateLayer() attributes[key] = data else: # Field already exists, add data to dict for CreateLayer() attributes[key] = data # Create the feature with, passing new attribute data create_feature(g, edges, attributes) nodes, edges = None, None # fixture for nose tests def setup_module(module): from nose import SkipTest try: import ogr except: raise SkipTest("OGR not available") networkx-1.8.1/networkx/readwrite/sparsegraph6.py0000664000175000017500000001121512177456333022123 0ustar aricaric00000000000000""" ************** SparseGraph 6 ************** Read graphs in graph6 and sparse6 format. 
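A quick illustration (a sketch using the five-node graph6 string that also
appears in the unit tests)::

    >>> G = nx.parse_graph6('DF{')
    >>> sorted(G.nodes())
    [0, 1, 2, 3, 4]
    >>> (0, 3) in G.edges()
    True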
Format ------ "graph6 and sparse6 are formats for storing undirected graphs in a compact manner, using only printable ASCII characters. Files in these formats have text type and contain one line per graph." http://cs.anu.edu.au/~bdm/data/formats.html See http://cs.anu.edu.au/~bdm/data/formats.txt for details. """ # Original author: D. Eppstein, UC Irvine, August 12, 2003. # The original code at http://www.ics.uci.edu/~eppstein/PADS/ is public domain. __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['read_graph6', 'parse_graph6', 'read_graph6_list', 'read_sparse6', 'parse_sparse6', 'read_sparse6_list'] import networkx as nx from networkx.exception import NetworkXError from networkx.utils import open_file # graph6 def read_graph6(path): """Read simple undirected graphs in graph6 format from path. Returns a single Graph. """ return read_graph6_list(path)[0] def parse_graph6(str): """Read a simple undirected graph in graph6 format from string. Returns a single Graph. """ def bits(): """Return sequence of individual bits from 6-bit-per-value list of data values.""" for d in data: for i in [5,4,3,2,1,0]: yield (d>>i)&1 if str.startswith('>>graph6<<'): str = str[10:] data = graph6data(str) n, data = graph6n(data) nd = (n*(n-1)//2 + 5) // 6 if len(data) != nd: raise NetworkXError(\ 'Expected %d bits but got %d in graph6' % (n*(n-1)//2, len(data)*6)) G=nx.Graph() G.add_nodes_from(range(n)) for (i,j),b in zip([(i,j) for j in range(1,n) for i in range(j)], bits()): if b: G.add_edge(i,j) return G @open_file(0,mode='rt') def read_graph6_list(path): """Read simple undirected graphs in graph6 format from path. Returns a list of Graphs, one for each line in file. """ glist=[] for line in path: line = line.strip() if not len(line): continue glist.append(parse_graph6(line)) return glist # sparse6 def read_sparse6(path): """Read simple undirected graphs in sparse6 format from path. Returns a single MultiGraph.""" return read_sparse6_list(path)[0] @open_file(0,mode='rt') def read_sparse6_list(path): """Read undirected graphs in sparse6 format from path. Returns a list of MultiGraphs, one for each line in file.""" glist=[] for line in path: line = line.strip() if not len(line): continue glist.append(parse_sparse6(line)) return glist def parse_sparse6(string): """Read undirected graph in sparse6 format from string. Returns a MultiGraph. """ if string.startswith('>>sparse6<<'): string = str[10:] if not string.startswith(':'): raise NetworkXError('Expected colon in sparse6') n, data = graph6n(graph6data(string[1:])) k = 1 while 1<>dLen) & 1 # grab top remaining bit x = d & ((1<> (xLen - k)) # shift back the extra bits dLen = xLen - k yield b,x v = 0 G=nx.MultiGraph() G.add_nodes_from(range(n)) for b,x in parseData(): if b: v += 1 if x >= n: break # padding with ones can cause overlarge number here elif x > v: v = x else: G.add_edge(x,v) return G # helper functions def graph6data(str): """Convert graph6 character sequence to 6-bit integers.""" v = [ord(c)-63 for c in str] if min(v) < 0 or max(v) > 63: return None return v def graph6n(data): """Read initial one or four-unit value from graph6 sequence. 
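For small graphs the order fits in a single unit, e.g. graph6n([5, 1, 2])
    returns (5, [1, 2]); orders above 62 use the four-unit form.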
Return value, rest of seq.""" if data[0] <= 62: return data[0], data[1:] return (data[1]<<12) + (data[2]<<6) + data[3], data[4:] networkx-1.8.1/networkx/readwrite/json_graph/0000775000175000017500000000000012177457361021300 5ustar aricaric00000000000000networkx-1.8.1/networkx/readwrite/json_graph/tree.py0000664000175000017500000000575612177456333022624 0ustar aricaric00000000000000# Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from itertools import count,repeat import json import networkx as nx __author__ = """Aric Hagberg (hagberg@lanl.gov))""" __all__ = ['tree_data', 'tree_graph'] def tree_data(G, root): """Return data in tree format that is suitable for JSON serialization and use in Javascript documents. Parameters ---------- G : NetworkX graph G must be an oriented tree root : node The root of the tree Returns ------- data : dict A dictionary with node-link formatted data. Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.DiGraph([(1,2)]) >>> data = json_graph.tree_data(G,root=1) To serialize with json >>> import json >>> s = json.dumps(data) Notes ----- Node attributes are stored in this format but keys for attributes must be strings if you want to serialize with JSON. Graph and edge attributes are not stored. See Also -------- tree_graph, node_link_data, node_link_data """ if not G.number_of_nodes()==G.number_of_edges()+1: raise TypeError("G is not a tree.") if not G.is_directed(): raise TypeError("G is not directed") def add_children(n, G): nbrs = G[n] if len(nbrs) == 0: return [] children = [] for child in nbrs: d = dict(id=child, **G.node[child]) c = add_children(child,G) if c: d['children'] = c children.append(d) return children data = dict(id=root, **G.node[root]) data['children'] = add_children(root,G) return data def tree_graph(data): """Return graph from tree data format. Parameters ---------- data : dict Tree formatted graph data Returns ------- G : NetworkX DiGraph Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.DiGraph([(1,2)]) >>> data = json_graph.tree_data(G,root=1) >>> H = json_graph.tree_graph(data) See Also -------- tree_graph, node_link_data, adjacency_data """ graph = nx.DiGraph() def add_children(parent, children): for data in children: child = data['id'] graph.add_edge(parent, child) grandchildren = data.get('children',[]) if grandchildren: add_children(child,grandchildren) nodedata = dict((str(k),v) for k,v in data.items() if k!='id' and k!='children') graph.add_node(child,attr_dict=nodedata) root = data['id'] children = data.get('children',[]) nodedata = dict((k,v) for k,v in data.items() if k!='id' and k!='children') graph.add_node(root, attr_dict=nodedata) add_children(root, children) return graph networkx-1.8.1/networkx/readwrite/json_graph/adjacency.py0000664000175000017500000000664212177456333023601 0ustar aricaric00000000000000# Copyright (C) 2011-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from copy import deepcopy from itertools import count,repeat import json import networkx as nx __author__ = """Aric Hagberg """ __all__ = ['adjacency_data', 'adjacency_graph'] def adjacency_data(G): """Return data in adjacency format that is suitable for JSON serialization and use in Javascript documents. Parameters ---------- G : NetworkX graph Returns ------- data : dict A dictionary with node-link formatted data. 
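For the two-node graph built in the Examples below, the returned
        dictionary looks roughly like this (illustrative)::

            {'directed': False, 'multigraph': False, 'graph': [],
             'nodes': [{'id': 1}, {'id': 2}],
             'adjacency': [[{'id': 2}], [{'id': 1}]]}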
Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.Graph([(1,2)]) >>> data = json_graph.adjacency_data(G) To serialize with json >>> import json >>> s = json.dumps(data) Notes ----- Graph, node, and link attributes will be written when using this format but attribute keys must be strings if you want to serialize the resulting data with JSON. See Also -------- adjacency_graph, node_link_data, tree_data """ multigraph = G.is_multigraph() data = {} data['directed'] = G.is_directed() data['multigraph'] = multigraph data['graph'] = list(G.graph.items()) data['nodes'] = [] data['adjacency'] = [] for n,nbrdict in G.adjacency_iter(): data['nodes'].append(dict(id=n, **G.node[n])) adj = [] if multigraph: for nbr,key in nbrdict.items(): for k,d in key.items(): adj.append(dict(id=nbr, key=k, **d)) else: for nbr,d in nbrdict.items(): adj.append(dict(id=nbr, **d)) data['adjacency'].append(adj) return data def adjacency_graph(data, directed=False, multigraph=True): """Return graph from adjacency data format. Parameters ---------- data : dict Adjacency list formatted graph data Returns ------- G : NetworkX graph A NetworkX graph object directed : bool If True, and direction not specified in data, return a directed graph. multigraph : bool If True, and multigraph not specified in data, return a multigraph. Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.Graph([(1,2)]) >>> data = json_graph.adjacency_data(G) >>> H = json_graph.adjacency_graph(data) See Also -------- adjacency_graph, node_link_data, tree_data """ multigraph = data.get('multigraph',multigraph) directed = data.get('directed',directed) if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() graph.graph = dict(data.get('graph',[])) mapping=[] for d in data['nodes']: node_data = d.copy() node = node_data.pop('id') mapping.append(node) graph.add_node(node, attr_dict=node_data) for i,d in enumerate(data['adjacency']): source = mapping[i] for tdata in d: target_data = tdata.copy() target = target_data.pop('id') key = target_data.pop('key', None) if not multigraph or key is None: graph.add_edge(source,target,attr_dict=tdata) else: graph.add_edge(source,target,key=key, attr_dict=tdata) return graph networkx-1.8.1/networkx/readwrite/json_graph/__init__.py0000664000175000017500000000047112177456333023411 0ustar aricaric00000000000000""" ********* JSON data ********* Generate and parse JSON serializable data for NetworkX graphs. 
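A typical round trip through JSON text (a sketch mirroring the node-link
examples elsewhere in this package)::

    >>> import json
    >>> import networkx as nx
    >>> from networkx.readwrite import json_graph
    >>> G = nx.Graph([(1, 2)])
    >>> s = json.dumps(json_graph.node_link_data(G))
    >>> H = json_graph.node_link_graph(json.loads(s))
    >>> sorted(H.nodes())
    [1, 2]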
""" from networkx.readwrite.json_graph.node_link import * from networkx.readwrite.json_graph.adjacency import * from networkx.readwrite.json_graph.tree import * from networkx.readwrite.json_graph.serialize import * networkx-1.8.1/networkx/readwrite/json_graph/tests/0000775000175000017500000000000012177457361022442 5ustar aricaric00000000000000networkx-1.8.1/networkx/readwrite/json_graph/tests/test_node_link.py0000664000175000017500000000250712177456333026017 0ustar aricaric00000000000000import json from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true import networkx as nx from networkx.readwrite.json_graph import * class TestNodeLink: def test_graph(self): G = nx.path_graph(4) H = node_link_graph(node_link_data(G)) nx.is_isomorphic(G,H) def test_graph_attributes(self): G = nx.path_graph(4) G.add_node(1,color='red') G.add_edge(1,2,width=7) G.graph[1]='one' G.graph['foo']='bar' H = node_link_graph(node_link_data(G)) assert_equal(H.graph['foo'],'bar') assert_equal(H.node[1]['color'],'red') assert_equal(H[1][2]['width'],7) d=json.dumps(node_link_data(G)) H = node_link_graph(json.loads(d)) assert_equal(H.graph['foo'],'bar') assert_equal(H.graph[1],'one') assert_equal(H.node[1]['color'],'red') assert_equal(H[1][2]['width'],7) def test_digraph(self): G = nx.DiGraph() H = node_link_graph(node_link_data(G)) assert_true(H.is_directed()) def test_multigraph(self): G = nx.MultiGraph() G.add_edge(1,2,key='first') G.add_edge(1,2,key='second',color='blue') H = node_link_graph(node_link_data(G)) nx.is_isomorphic(G,H) assert_equal(H[1][2]['second']['color'],'blue') networkx-1.8.1/networkx/readwrite/json_graph/tests/test_adjacency.py0000664000175000017500000000313212177456333025771 0ustar aricaric00000000000000import json from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true import networkx as nx from networkx.readwrite.json_graph import * class TestAdjacency: def test_graph(self): G = nx.path_graph(4) H = adjacency_graph(adjacency_data(G)) nx.is_isomorphic(G,H) def test_graph_attributes(self): G = nx.path_graph(4) G.add_node(1,color='red') G.add_edge(1,2,width=7) G.graph['foo']='bar' G.graph[1]='one' H = adjacency_graph(adjacency_data(G)) assert_equal(H.graph['foo'],'bar') assert_equal(H.node[1]['color'],'red') assert_equal(H[1][2]['width'],7) d = json.dumps(adjacency_data(G)) H = adjacency_graph(json.loads(d)) assert_equal(H.graph['foo'],'bar') assert_equal(H.graph[1],'one') assert_equal(H.node[1]['color'],'red') assert_equal(H[1][2]['width'],7) def test_digraph(self): G = nx.DiGraph() G.add_path([1,2,3]) H = adjacency_graph(adjacency_data(G)) assert_true(H.is_directed()) nx.is_isomorphic(G,H) def test_multidigraph(self): G = nx.MultiDiGraph() G.add_path([1,2,3]) H = adjacency_graph(adjacency_data(G)) assert_true(H.is_directed()) assert_true(H.is_multigraph()) def test_multigraph(self): G = nx.MultiGraph() G.add_edge(1,2,key='first') G.add_edge(1,2,key='second',color='blue') H = adjacency_graph(adjacency_data(G)) nx.is_isomorphic(G,H) assert_equal(H[1][2]['second']['color'],'blue') networkx-1.8.1/networkx/readwrite/json_graph/tests/test_tree.py0000664000175000017500000000152412177456333025012 0ustar aricaric00000000000000import json from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true import networkx as nx from networkx.readwrite.json_graph import * class TestTree: def test_graph(self): G=nx.DiGraph() G.add_nodes_from([1,2,3],color='red') G.add_edge(1,2,foo=7) G.add_edge(1,3,foo=10) G.add_edge(3,4,foo=10) H = 
tree_graph(tree_data(G,1)) nx.is_isomorphic(G,H) def test_graph_attributes(self): G=nx.DiGraph() G.add_nodes_from([1,2,3],color='red') G.add_edge(1,2,foo=7) G.add_edge(1,3,foo=10) G.add_edge(3,4,foo=10) H = tree_graph(tree_data(G,1)) assert_equal(H.node[1]['color'],'red') d = json.dumps(tree_data(G,1)) H = tree_graph(json.loads(d)) assert_equal(H.node[1]['color'],'red') networkx-1.8.1/networkx/readwrite/json_graph/tests/test_serialize.py0000664000175000017500000000246112177456333026043 0ustar aricaric00000000000000import json from nose.tools import assert_equal, assert_raises, assert_not_equal,assert_true import networkx as nx from networkx.readwrite.json_graph import * class TestAdjacency: def test_graph(self): G = nx.path_graph(4) H = loads(dumps(G)) nx.is_isomorphic(G,H) def test_graph_attributes(self): G = nx.path_graph(4) G.add_node(1,color='red') G.add_edge(1,2,width=7) G.graph['foo']='bar' G.graph[1]='one' H = loads(dumps(G)) assert_equal(H.graph['foo'],'bar') assert_equal(H.graph[1],'one') assert_equal(H.node[1]['color'],'red') assert_equal(H[1][2]['width'],7) try: from StringIO import StringIO except: from io import StringIO io = StringIO() dump(G,io) io.seek(0) H=load(io) assert_equal(H.graph['foo'],'bar') assert_equal(H.graph[1],'one') assert_equal(H.node[1]['color'],'red') assert_equal(H[1][2]['width'],7) def test_digraph(self): G = nx.DiGraph() H = loads(dumps(G)) assert_true(H.is_directed()) def test_multidigraph(self): G = nx.MultiDiGraph() H = loads(dumps(G)) assert_true(H.is_directed()) assert_true(H.is_multigraph()) networkx-1.8.1/networkx/readwrite/json_graph/node_link.py0000664000175000017500000000635512177456333023623 0ustar aricaric00000000000000# Copyright (C) 2011-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from itertools import count,repeat import json import networkx as nx __author__ = """Aric Hagberg """ __all__ = ['node_link_data', 'node_link_graph'] def node_link_data(G): """Return data in node-link format that is suitable for JSON serialization and use in Javascript documents. Parameters ---------- G : NetworkX graph Returns ------- data : dict A dictionary with node-link formatted data. Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.Graph([(1,2)]) >>> data = json_graph.node_link_data(G) To serialize with json >>> import json >>> s = json.dumps(data) Notes ----- Graph, node, and link attributes are stored in this format but keys for attributes must be strings if you want to serialize with JSON. See Also -------- node_link_graph, adjacency_data, tree_data """ multigraph = G.is_multigraph() mapping = dict(zip(G,count())) data = {} data['directed'] = G.is_directed() data['multigraph'] = multigraph data['graph'] = list(G.graph.items()) data['nodes'] = [ dict(id=n, **G.node[n]) for n in G ] if multigraph: data['links'] = [ dict(source=mapping[u], target=mapping[v], key=k, **d) for u,v,k,d in G.edges(keys=True, data=True) ] else: data['links'] = [ dict(source=mapping[u], target=mapping[v], **d) for u,v,d in G.edges(data=True) ] return data def node_link_graph(data, directed=False, multigraph=True): """Return graph from node-link data format. Parameters ---------- data : dict node-link formatted graph data directed : bool If True, and direction not specified in data, return a directed graph. multigraph : bool If True, and multigraph not specified in data, return a multigraph. 
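Note that 'directed' and 'multigraph' entries already present in
        ``data`` take precedence over these two arguments, which act only as
        fall-backs (see the ``data.get`` calls in the implementation).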
Returns ------- G : NetworkX graph A NetworkX graph object Examples -------- >>> from networkx.readwrite import json_graph >>> G = nx.Graph([(1,2)]) >>> data = json_graph.node_link_data(G) >>> H = json_graph.node_link_graph(data) See Also -------- node_link_data, adjacency_data, tree_data """ multigraph = data.get('multigraph',multigraph) directed = data.get('directed',directed) if multigraph: graph = nx.MultiGraph() else: graph = nx.Graph() if directed: graph = graph.to_directed() mapping=[] graph.graph = dict(data.get('graph',[])) c = count() for d in data['nodes']: node = d.get('id',next(c)) mapping.append(node) nodedata = dict((str(k),v) for k,v in d.items() if k!='id') graph.add_node(node, **nodedata) for d in data['links']: link_data = d.copy() source = link_data.pop('source') target = link_data.pop('target') edgedata = dict((str(k),v) for k,v in d.items() if k!='source' and k!='target') graph.add_edge(mapping[source],mapping[target],**edgedata) return graph networkx-1.8.1/networkx/readwrite/json_graph/serialize.py0000664000175000017500000000175612177456333023650 0ustar aricaric00000000000000# Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from functools import partial,update_wrapper import json from networkx.readwrite.json_graph import node_link_data,node_link_graph __author__ = """Aric Hagberg (hagberg@lanl.gov))""" __all__ = ['dumps','loads','dump','load'] class NXJSONEncoder(json.JSONEncoder): def default(self, o): return node_link_data(o) class NXJSONDecoder(json.JSONDecoder): def decode(self, s): d = json.loads(s) return node_link_graph(d) # modification of json functions to serialize networkx graphs dumps = partial(json.dumps, cls=NXJSONEncoder) update_wrapper(dumps,json.dumps) loads = partial(json.loads, cls=NXJSONDecoder) update_wrapper(loads,json.loads) dump = partial(json.dump, cls=NXJSONEncoder) update_wrapper(dump,json.dump) load = partial(json.load, cls=NXJSONDecoder) update_wrapper(load,json.load) networkx-1.8.1/networkx/readwrite/gpickle.py0000664000175000017500000000516712177456333021145 0ustar aricaric00000000000000""" ************** Pickled Graphs ************** Read and write NetworkX graphs as Python pickles. "The pickle module implements a fundamental, but powerful algorithm for serializing and de-serializing a Python object structure. "Pickling" is the process whereby a Python object hierarchy is converted into a byte stream, and "unpickling" is the inverse operation, whereby a byte stream is converted back into an object hierarchy." Note that NetworkX graphs can contain any hashable Python object as node (not just integers and strings). For arbitrary data types it may be difficult to represent the data as text. In that case using Python pickles to store the graph data can be used. Format ------ See http://docs.python.org/library/pickle.html """ __author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult (dschult@colgate.edu)""" # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['read_gpickle', 'write_gpickle'] import networkx as nx from networkx.utils import open_file try: import cPickle as pickle except ImportError: import pickle @open_file(1,mode='wb') def write_gpickle(G, path): """Write graph in Python pickle format. Pickles are a serialized byte stream of a Python object [1]_. This format will preserve Python objects used as nodes or edges. 
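The graph is pickled with ``pickle.HIGHEST_PROTOCOL``; because the path is
    handled by the ``open_file`` decorator, writing compressed output is only
    a matter of the filename, e.g. ``nx.write_gpickle(G, "test.gpickle.gz")``
    (illustrative, see the path parameter below).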
Parameters ---------- G : graph A NetworkX graph path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be compressed. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_gpickle(G,"test.gpickle") References ---------- .. [1] http://docs.python.org/library/pickle.html """ pickle.dump(G, path, pickle.HIGHEST_PROTOCOL) @open_file(0,mode='rb') def read_gpickle(path): """Read graph object in Python pickle format. Pickles are a serialized byte stream of a Python object [1]_. This format will preserve Python objects used as nodes or edges. Parameters ---------- path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be uncompressed. Returns ------- G : graph A NetworkX graph Examples -------- >>> G=nx.path_graph(4) >>> nx.write_gpickle(G,"test.gpickle") >>> G=nx.read_gpickle("test.gpickle") References ---------- .. [1] http://docs.python.org/library/pickle.html """ return pickle.load(path) # fixture for nose tests def teardown_module(module): import os os.unlink('test.gpickle') networkx-1.8.1/networkx/readwrite/edgelist.py0000664000175000017500000003331212177456333021320 0ustar aricaric00000000000000""" ********** Edge Lists ********** Read and write NetworkX graphs as edge lists. The multi-line adjacency list format is useful for graphs with nodes that can be meaningfully represented as strings. With the edgelist format simple edge data can be stored but node or graph data is not. There is no way of representing isolated nodes unless the node has a self-loop edge. Format ------ You can read or write three formats of edge lists with these functions. Node pairs with no data:: 1 2 Python dictionary as data:: 1 2 {'weight':7, 'color':'green'} Arbitrary data:: 1 2 7 green """ __author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult (dschult@colgate.edu)""" # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['generate_edgelist', 'write_edgelist', 'parse_edgelist', 'read_edgelist', 'read_weighted_edgelist', 'write_weighted_edgelist'] from networkx.utils import open_file, make_str import networkx as nx def generate_edgelist(G, delimiter=' ', data=True): """Generate a single line of the graph G in edge list format. Parameters ---------- G : NetworkX graph delimiter : string, optional Separator for node labels data : bool or list of keys If False generate no edge data. If True use a dictionary representation of edge data. If a list of keys use a list of data values corresponding to the keys. Returns ------- lines : string Lines of data in adjlist format. Examples -------- >>> G = nx.lollipop_graph(4, 3) >>> G[1][2]['weight'] = 3 >>> G[3][4]['capacity'] = 12 >>> for line in nx.generate_edgelist(G, data=False): ... print(line) 0 1 0 2 0 3 1 2 1 3 2 3 3 4 4 5 5 6 >>> for line in nx.generate_edgelist(G): ... print(line) 0 1 {} 0 2 {} 0 3 {} 1 2 {'weight': 3} 1 3 {} 2 3 {} 3 4 {'capacity': 12} 4 5 {} 5 6 {} >>> for line in nx.generate_edgelist(G,data=['weight']): ... print(line) 0 1 0 2 0 3 1 2 3 1 3 2 3 3 4 4 5 5 6 See Also -------- write_adjlist, read_adjlist """ if data is True or data is False: for e in G.edges(data=data): yield delimiter.join(map(make_str,e)) else: for u,v,d in G.edges(data=True): e=[u,v] try: e.extend(d[k] for k in data) except KeyError: pass # missing data for this edge, should warn? 
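# note: a missing key aborts the extend() above part-way, so the
            # edge is still written, carrying only the values gathered before
            # the KeyError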
yield delimiter.join(map(make_str,e)) @open_file(1,mode='wb') def write_edgelist(G, path, comments="#", delimiter=' ', data=True, encoding = 'utf-8'): """Write graph as a list of edges. Parameters ---------- G : graph A NetworkX graph path : file or string File or filename to write. If a file is provided, it must be opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed. comments : string, optional The character used to indicate the start of a comment delimiter : string, optional The string used to separate values. The default is whitespace. data : bool or list, optional If False write no edge data. If True write a string representation of the edge data dictionary.. If a list (or other iterable) is provided, write the keys specified in the list. encoding: string, optional Specify which encoding to use when writing file. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_edgelist(G, "test.edgelist") >>> G=nx.path_graph(4) >>> fh=open("test.edgelist",'wb') >>> nx.write_edgelist(G, fh) >>> nx.write_edgelist(G, "test.edgelist.gz") >>> nx.write_edgelist(G, "test.edgelist.gz", data=False) >>> G=nx.Graph() >>> G.add_edge(1,2,weight=7,color='red') >>> nx.write_edgelist(G,'test.edgelist',data=False) >>> nx.write_edgelist(G,'test.edgelist',data=['color']) >>> nx.write_edgelist(G,'test.edgelist',data=['color','weight']) See Also -------- write_edgelist() write_weighted_edgelist() """ for line in generate_edgelist(G, delimiter, data): line+='\n' path.write(line.encode(encoding)) def parse_edgelist(lines, comments='#', delimiter=None, create_using=None, nodetype=None, data=True): """Parse lines of an edge list representation of a graph. Returns ------- G: NetworkX Graph The graph corresponding to lines data : bool or list of (label,type) tuples If False generate no edge data or if True use a dictionary representation of edge data or a list tuples specifying dictionary key names and types for edge data. create_using: NetworkX graph container, optional Use given NetworkX graph for holding nodes or edges. nodetype : Python type, optional Convert nodes to this type. comments : string, optional Marker for comment lines delimiter : string, optional Separator for node labels create_using: NetworkX graph container Use given NetworkX graph for holding nodes or edges. Examples -------- Edgelist with no data: >>> lines = ["1 2", ... "2 3", ... "3 4"] >>> G = nx.parse_edgelist(lines, nodetype = int) >>> G.nodes() [1, 2, 3, 4] >>> G.edges() [(1, 2), (2, 3), (3, 4)] Edgelist with data in Python dictionary representation: >>> lines = ["1 2 {'weight':3}", ... "2 3 {'weight':27}", ... "3 4 {'weight':3.0}"] >>> G = nx.parse_edgelist(lines, nodetype = int) >>> G.nodes() [1, 2, 3, 4] >>> G.edges(data = True) [(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})] Edgelist with data in a list: >>> lines = ["1 2 3", ... "2 3 27", ... 
"3 4 3.0"] >>> G = nx.parse_edgelist(lines, nodetype = int, data=(('weight',float),)) >>> G.nodes() [1, 2, 3, 4] >>> G.edges(data = True) [(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})] See Also -------- read_weighted_edgelist """ from ast import literal_eval if create_using is None: G=nx.Graph() else: try: G=create_using G.clear() except: raise TypeError("create_using input is not a NetworkX graph type") for line in lines: p=line.find(comments) if p>=0: line = line[:p] if not len(line): continue # split line, should have 2 or more s=line.strip().split(delimiter) if len(s)<2: continue u=s.pop(0) v=s.pop(0) d=s if nodetype is not None: try: u=nodetype(u) v=nodetype(v) except: raise TypeError("Failed to convert nodes %s,%s to type %s." %(u,v,nodetype)) if len(d)==0 or data is False: # no data or data type specified edgedata={} elif data is True: # no edge types specified try: # try to evaluate as dictionary edgedata=dict(literal_eval(' '.join(d))) except: raise TypeError( "Failed to convert edge data (%s) to dictionary."%(d)) else: # convert edge data to dictionary with specified keys and type if len(d)!=len(data): raise IndexError( "Edge data %s and data_keys %s are not the same length"% (d, data)) edgedata={} for (edge_key,edge_type),edge_value in zip(data,d): try: edge_value=edge_type(edge_value) except: raise TypeError( "Failed to convert %s data %s to type %s." %(edge_key, edge_value, edge_type)) edgedata.update({edge_key:edge_value}) G.add_edge(u, v, attr_dict=edgedata) return G @open_file(0,mode='rb') def read_edgelist(path, comments="#", delimiter=None, create_using=None, nodetype=None, data=True, edgetype=None, encoding='utf-8'): """Read a graph from a list of edges. Parameters ---------- path : file or string File or filename to write. If a file is provided, it must be opened in 'rb' mode. Filenames ending in .gz or .bz2 will be uncompressed. comments : string, optional The character used to indicate the start of a comment. delimiter : string, optional The string used to separate values. The default is whitespace. create_using : Graph container, optional, Use specified container to build graph. The default is networkx.Graph, an undirected graph. nodetype : int, float, str, Python type, optional Convert node data from strings to specified type data : bool or list of (label,type) tuples Tuples specifying dictionary key names and types for edge data edgetype : int, float, str, Python type, optional OBSOLETE Convert edge data from strings to specified type and use as 'weight' encoding: string, optional Specify which encoding to use when reading file. Returns ------- G : graph A networkx Graph or other type specified with create_using Examples -------- >>> nx.write_edgelist(nx.path_graph(4), "test.edgelist") >>> G=nx.read_edgelist("test.edgelist") >>> fh=open("test.edgelist", 'rb') >>> G=nx.read_edgelist(fh) >>> fh.close() >>> G=nx.read_edgelist("test.edgelist", nodetype=int) >>> G=nx.read_edgelist("test.edgelist",create_using=nx.DiGraph()) Edgelist with data in a list: >>> textline = '1 2 3' >>> fh = open('test.edgelist','w') >>> d = fh.write(textline) >>> fh.close() >>> G = nx.read_edgelist('test.edgelist', nodetype=int, data=(('weight',float),)) >>> G.nodes() [1, 2] >>> G.edges(data = True) [(1, 2, {'weight': 3.0})] See parse_edgelist() for more examples of formatting. See Also -------- parse_edgelist Notes ----- Since nodes must be hashable, the function nodetype must return hashable types (e.g. int, float, str, frozenset - or tuples of those, etc.) 
""" lines = (line.decode(encoding) for line in path) return parse_edgelist(lines,comments=comments, delimiter=delimiter, create_using=create_using, nodetype=nodetype, data=data) def write_weighted_edgelist(G, path, comments="#", delimiter=' ', encoding='utf-8'): """Write graph G as a list of edges with numeric weights. Parameters ---------- G : graph A NetworkX graph path : file or string File or filename to write. If a file is provided, it must be opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed. comments : string, optional The character used to indicate the start of a comment delimiter : string, optional The string used to separate values. The default is whitespace. encoding: string, optional Specify which encoding to use when writing file. Examples -------- >>> G=nx.Graph() >>> G.add_edge(1,2,weight=7) >>> nx.write_weighted_edgelist(G, 'test.weighted.edgelist') See Also -------- read_edgelist() write_edgelist() write_weighted_edgelist() """ write_edgelist(G,path, comments=comments, delimiter=delimiter, data=('weight',), encoding = encoding) def read_weighted_edgelist(path, comments="#", delimiter=None, create_using=None, nodetype=None, encoding='utf-8'): """Read a graph as list of edges with numeric weights. Parameters ---------- path : file or string File or filename to write. If a file is provided, it must be opened in 'rb' mode. Filenames ending in .gz or .bz2 will be uncompressed. comments : string, optional The character used to indicate the start of a comment. delimiter : string, optional The string used to separate values. The default is whitespace. create_using : Graph container, optional, Use specified container to build graph. The default is networkx.Graph, an undirected graph. nodetype : int, float, str, Python type, optional Convert node data from strings to specified type encoding: string, optional Specify which encoding to use when reading file. Returns ------- G : graph A networkx Graph or other type specified with create_using Notes ----- Since nodes must be hashable, the function nodetype must return hashable types (e.g. int, float, str, frozenset - or tuples of those, etc.) Example edgelist file format. With numeric edge data:: # read with # >>> G=nx.read_weighted_edgelist(fh) # source target data a b 1 a c 3.14159 d e 42 """ return read_edgelist(path, comments=comments, delimiter=delimiter, create_using=create_using, nodetype=nodetype, data=(('weight',float),), encoding = encoding ) # fixture for nose tests def teardown_module(module): import os os.unlink('test.edgelist') os.unlink('test.edgelist.gz') os.unlink('test.weighted.edgelist') networkx-1.8.1/networkx/readwrite/pajek.py0000664000175000017500000001477112177456333020622 0ustar aricaric00000000000000""" ***** Pajek ***** Read graphs in Pajek format. This implementation handles directed and undirected graphs including those with self loops and parallel edges. Format ------ See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm for format information. """ # Copyright (C) 2008-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from networkx.utils import is_string_like, open_file, make_str __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['read_pajek', 'parse_pajek', 'generate_pajek', 'write_pajek'] def generate_pajek(G): """Generate lines in Pajek graph format. 
Parameters ---------- G : graph A Networkx graph References ---------- See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm for format information. """ if G.name=='': name='NetworkX' else: name=G.name # Apparently many Pajek format readers can't process this line # So we'll leave it out for now. # yield '*network %s'%name # write nodes with attributes yield '*vertices %s'%(G.order()) nodes = G.nodes() # make dictionary mapping nodes to integers nodenumber=dict(zip(nodes,range(1,len(nodes)+1))) for n in nodes: na=G.node.get(n,{}) x=na.get('x',0.0) y=na.get('y',0.0) id=int(na.get('id',nodenumber[n])) nodenumber[n]=id shape=na.get('shape','ellipse') s=' '.join(map(make_qstr,(id,n,x,y,shape))) for k,v in na.items(): s+=' %s %s'%(make_qstr(k),make_qstr(v)) yield s # write edges with attributes if G.is_directed(): yield '*arcs' else: yield '*edges' for u,v,edgedata in G.edges(data=True): d=edgedata.copy() value=d.pop('weight',1.0) # use 1 as default edge value s=' '.join(map(make_qstr,(nodenumber[u],nodenumber[v],value))) for k,v in d.items(): s+=' %s %s'%(make_qstr(k),make_qstr(v)) s+=' %s %s'%(k,v) yield s @open_file(1,mode='wb') def write_pajek(G, path, encoding='UTF-8'): """Write graph in Pajek format to path. Parameters ---------- G : graph A Networkx graph path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be compressed. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_pajek(G, "test.net") References ---------- See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm for format information. """ for line in generate_pajek(G): line+='\n' path.write(line.encode(encoding)) @open_file(0,mode='rb') def read_pajek(path,encoding='UTF-8'): """Read graph in Pajek format from path. Parameters ---------- path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be uncompressed. Returns ------- G : NetworkX MultiGraph or MultiDiGraph. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_pajek(G, "test.net") >>> G=nx.read_pajek("test.net") To create a Graph instead of a MultiGraph use >>> G1=nx.Graph(G) References ---------- See http://vlado.fmf.uni-lj.si/pub/networks/pajek/doc/draweps.htm for format information. """ lines = (line.decode(encoding) for line in path) return parse_pajek(lines) def parse_pajek(lines): """Parse Pajek format graph from string or iterable. Parameters ---------- lines : string or iterable Data in Pajek format. Returns ------- G : NetworkX graph See Also -------- read_pajek() """ import shlex # multigraph=False if is_string_like(lines): lines=iter(lines.split('\n')) lines = iter([line.rstrip('\n') for line in lines]) G=nx.MultiDiGraph() # are multiedges allowed in Pajek? 
assume yes while lines: try: l=next(lines) except: #EOF break if l.lower().startswith("*network"): label,name=l.split() G.name=name if l.lower().startswith("*vertices"): nodelabels={} l,nnodes=l.split() for i in range(int(nnodes)): splitline=shlex.split(str(next(lines))) id,label=splitline[0:2] G.add_node(label) nodelabels[id]=label G.node[label]={'id':id} try: x,y,shape=splitline[2:5] G.node[label].update({'x':float(x), 'y':float(y), 'shape':shape}) except: pass extra_attr=zip(splitline[5::2],splitline[6::2]) G.node[label].update(extra_attr) if l.lower().startswith("*edges") or l.lower().startswith("*arcs"): if l.lower().startswith("*edge"): # switch from multidigraph to multigraph G=nx.MultiGraph(G) if l.lower().startswith("*arcs"): # switch to directed with multiple arcs for each existing edge G=G.to_directed() for l in lines: splitline=shlex.split(str(l)) if len(splitline)<2: continue ui,vi=splitline[0:2] u=nodelabels.get(ui,ui) v=nodelabels.get(vi,vi) # parse the data attached to this edge and put in a dictionary edge_data={} try: # there should always be a single value on the edge? w=splitline[2:3] edge_data.update({'weight':float(w[0])}) except: pass # if there isn't, just assign a 1 # edge_data.update({'value':1}) extra_attr=zip(splitline[3::2],splitline[4::2]) edge_data.update(extra_attr) # if G.has_edge(u,v): # multigraph=True G.add_edge(u,v,**edge_data) return G def make_qstr(t): """Return the string representation of t. Add outer double-quotes if the string has a space. """ if not is_string_like(t): t = str(t) if " " in t: t=r'"%s"'%t return t # fixture for nose tests def teardown_module(module): import os os.unlink('test.net') networkx-1.8.1/networkx/readwrite/nx_yaml.py0000664000175000017500000000470712177456333021175 0ustar aricaric00000000000000""" **** YAML **** Read and write NetworkX graphs in YAML format. "YAML is a data serialization format designed for human readability and interaction with scripting languages." See http://www.yaml.org for documentation. Format ------ http://pyyaml.org/wiki/PyYAML """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['read_yaml', 'write_yaml'] import networkx as nx from networkx.utils import open_file @open_file(1,mode='w') def write_yaml(G, path, encoding='UTF-8', **kwds): """Write graph G in YAML format to path. YAML is a data serialization format designed for human readability and interaction with scripting languages [1]_. Parameters ---------- G : graph A NetworkX graph path : file or string File or filename to write. Filenames ending in .gz or .bz2 will be compressed. encoding: string, optional Specify which encoding to use when writing file. Examples -------- >>> G=nx.path_graph(4) >>> nx.write_yaml(G,'test.yaml') References ---------- .. [1] http://www.yaml.org """ try: import yaml except ImportError: raise ImportError("write_yaml() requires PyYAML: http://pyyaml.org/") yaml.dump(G, path, **kwds) @open_file(0,mode='r') def read_yaml(path): """Read graph in YAML format from path. YAML is a data serialization format designed for human readability and interaction with scripting languages [1]_. Parameters ---------- path : file or string File or filename to read. Filenames ending in .gz or .bz2 will be uncompressed. Returns ------- G : NetworkX graph Examples -------- >>> G=nx.path_graph(4) >>> nx.write_yaml(G,'test.yaml') >>> G=nx.read_yaml('test.yaml') References ---------- .. 
[1] http://www.yaml.org """ try: import yaml except ImportError: raise ImportError("read_yaml() requires PyYAML: http://pyyaml.org/") G=yaml.load(path) return G # fixture for nose tests def setup_module(module): from nose import SkipTest try: import yaml except: raise SkipTest("PyYAML not available") # fixture for nose tests def teardown_module(module): import os os.unlink('test.yaml') networkx-1.8.1/networkx/version.py0000664000175000017500000000113312177456646017222 0ustar aricaric00000000000000""" Version information for NetworkX, created during installation. Do not add this file to the repository. """ import datetime version = '1.8.1' date = 'Sun Aug 4 07:56:54 2013' # Was NetworkX built from a development version? If so, remember that the major # and minor versions reference the "target" (rather than "current") release. dev = False # Format: (name, major, min, revision) version_info = ('networkx', '1', '8.1', None) # Format: a 'datetime.datetime' instance date_info = datetime.datetime(2013, 8, 4, 7, 56, 54, 416491) # Format: (vcs, vcs_tuple) vcs_info = (None, (None, None)) networkx-1.8.1/networkx/exception.py0000664000175000017500000000317412177456333017533 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ********** Exceptions ********** Base exceptions and errors for NetworkX. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult(dschult@colgate.edu)\nLoïc Séguin-C. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # # Exception handling # the root of all Exceptions class NetworkXException(Exception): """Base class for exceptions in NetworkX.""" class NetworkXError(NetworkXException): """Exception for a serious error in NetworkX""" class NetworkXPointlessConcept(NetworkXException): """Harary, F. and Read, R. "Is the Null Graph a Pointless Concept?" In Graphs and Combinatorics Conference, George Washington University. New York: Springer-Verlag, 1973. """ class NetworkXAlgorithmError(NetworkXException): """Exception for unexpected termination of algorithms.""" class NetworkXUnfeasible(NetworkXAlgorithmError): """Exception raised by algorithms trying to solve a problem instance that has no feasible solution.""" class NetworkXNoPath(NetworkXUnfeasible): """Exception for algorithms that should return a path when running on graphs where such a path does not exist.""" class NetworkXUnbounded(NetworkXAlgorithmError): """Exception raised by algorithms trying to solve a maximization or a minimization problem instance that is unbounded.""" class NetworkXNotImplemented(NetworkXException): """Exception raised by algorithms not implemented for a type of graph.""" networkx-1.8.1/networkx/__init__.py0000664000175000017500000000356112177456333017274 0ustar aricaric00000000000000""" NetworkX ======== NetworkX (NX) is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. https://networkx.lanl.gov/ Using ----- Just write in Python >>> import networkx as nx >>> G=nx.Graph() >>> G.add_edge(1,2) >>> G.add_node(42) >>> print(sorted(G.nodes())) [1, 2, 42] >>> print(sorted(G.edges())) [(1, 2)] """ # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # # Add platform dependent shared library path to sys.path # from __future__ import absolute_import import sys if sys.version_info[:2] < (2, 6): m = "Python version 2.6 or later is required for NetworkX (%d.%d detected)." 
raise ImportError(m % sys.version_info[:2]) del sys # Release data from networkx import release __author__ = '%s <%s>\n%s <%s>\n%s <%s>' % \ ( release.authors['Hagberg'] + release.authors['Schult'] + \ release.authors['Swart'] ) __license__ = release.license __date__ = release.date __version__ = release.version #These are import orderwise from networkx.exception import * import networkx.external import networkx.utils # these packages work with Python >= 2.6 import networkx.classes from networkx.classes import * import networkx.convert from networkx.convert import * import networkx.relabel from networkx.relabel import * import networkx.generators from networkx.generators import * import networkx.readwrite from networkx.readwrite import * #Need to test with SciPy, when available import networkx.algorithms from networkx.algorithms import * import networkx.linalg from networkx.linalg import * from networkx.tests.test import run as test import networkx.drawing from networkx.drawing import * networkx-1.8.1/networkx/external/0000775000175000017500000000000012177457361017002 5ustar aricaric00000000000000networkx-1.8.1/networkx/external/__init__.py0000664000175000017500000000000012177456333021077 0ustar aricaric00000000000000networkx-1.8.1/networkx/external/decorator/0000775000175000017500000000000012177457361020764 5ustar aricaric00000000000000networkx-1.8.1/networkx/external/decorator/__init__.py0000664000175000017500000000027012177456333023072 0ustar aricaric00000000000000""" Hack for including decorator-3.3.1 in NetworkX. """ import sys if sys.version >= '3': from .decorator3._decorator3 import * else: from .decorator2._decorator2 import * networkx-1.8.1/networkx/external/decorator/decorator3/0000775000175000017500000000000012177457361023031 5ustar aricaric00000000000000networkx-1.8.1/networkx/external/decorator/decorator3/__init__.py0000664000175000017500000000000212177456333025130 0ustar aricaric00000000000000# networkx-1.8.1/networkx/external/decorator/decorator3/_decorator3.py0000664000175000017500000002122212177456333025604 0ustar aricaric00000000000000########################## LICENCE ############################### ## ## Copyright (c) 2005-2011, Michele Simionato ## All rights reserved. ## ## Redistributions of source code must retain the above copyright ## notice, this list of conditions and the following disclaimer. ## Redistributions in bytecode form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in ## the documentation and/or other materials provided with the ## distribution. ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT ## HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, ## INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, ## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS ## OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR ## TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE ## USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH ## DAMAGE. """ Decorator module, see http://pypi.python.org/pypi/decorator for the documentation. 
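A small usage sketch (the function names here are purely illustrative)::

    >>> from networkx.external.decorator import decorator
    >>> @decorator
    ... def trace(f, *args, **kw):
    ...     print('calling %s' % f.__name__)
    ...     return f(*args, **kw)
    >>> @trace
    ... def add(x, y):
    ...     return x + y
    >>> add(1, 2)
    calling add
    3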
""" from __future__ import print_function __version__ = '3.3.1' __all__ = ["decorator", "FunctionMaker", "partial"] import sys, re, inspect try: from functools import partial except ImportError: # for Python version < 2.5 class partial(object): "A simple replacement of functools.partial" def __init__(self, func, *args, **kw): self.func = func self.args = args self.keywords = kw def __call__(self, *otherargs, **otherkw): kw = self.keywords.copy() kw.update(otherkw) return self.func(*(self.args + otherargs), **kw) if sys.version >= '3': from inspect import getfullargspec else: class getfullargspec(object): "A quick and dirty replacement for getfullargspec for Python 2.X" def __init__(self, f): self.args, self.varargs, self.varkw, self.defaults = \ inspect.getargspec(f) self.kwonlyargs = [] self.kwonlydefaults = None self.annotations = getattr(f, '__annotations__', {}) def __iter__(self): yield self.args yield self.varargs yield self.varkw yield self.defaults DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(') # basic functionality class FunctionMaker(object): """ An object with the ability to create functions with a given signature. It has attributes name, doc, module, signature, defaults, dict and methods update and make. """ def __init__(self, func=None, name=None, signature=None, defaults=None, doc=None, module=None, funcdict=None): self.shortsignature = signature if func: # func can be a class or a callable, but not an instance method self.name = func.__name__ if self.name == '': # small hack for lambda functions self.name = '_lambda_' self.doc = func.__doc__ self.module = func.__module__ if inspect.isfunction(func): argspec = getfullargspec(func) for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', 'kwonlydefaults', 'annotations'): setattr(self, a, getattr(argspec, a)) for i, arg in enumerate(self.args): setattr(self, 'arg%d' % i, arg) self.signature = inspect.formatargspec( formatvalue=lambda val: "", *argspec)[1:-1] allargs = list(self.args) if self.varargs: allargs.append('*' + self.varargs) if self.varkw: allargs.append('**' + self.varkw) try: self.shortsignature = ', '.join(allargs) except TypeError: # exotic signature, valid only in Python 2.X self.shortsignature = self.signature self.dict = func.__dict__.copy() # func=None happens when decorating a caller if name: self.name = name if signature is not None: self.signature = signature if defaults: self.defaults = defaults if doc: self.doc = doc if module: self.module = module if funcdict: self.dict = funcdict # check existence required attributes assert hasattr(self, 'name') if not hasattr(self, 'signature'): raise TypeError('You are decorating a non function: %s' % func) def update(self, func, **kw): "Update the signature of func with the data in self" func.__name__ = self.name func.__doc__ = getattr(self, 'doc', None) func.__dict__ = getattr(self, 'dict', {}) func.__defaults__ = getattr(self, 'defaults', ()) func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None) callermodule = sys._getframe(3).f_globals.get('__name__', '?') func.__module__ = getattr(self, 'module', callermodule) func.__dict__.update(kw) def make(self, src_templ, evaldict=None, addsource=False, **attrs): "Make a new function from a given template and update the signature" src = src_templ % vars(self) # expand name and signature evaldict = evaldict or {} mo = DEF.match(src) if mo is None: raise SyntaxError('not a valid function template\n%s' % src) name = mo.group(1) # extract the function name names = set([name] + [arg.strip(' *') for arg in 
self.shortsignature.split(',')]) for n in names: if n in ('_func_', '_call_'): raise NameError('%s is overridden in\n%s' % (n, src)) if not src.endswith('\n'): # add a newline just for safety src += '\n' # this is needed in old versions of Python try: code = compile(src, '', 'single') # print >> sys.stderr, 'Compiling %s' % src exec(code, evaldict) except: print('Error in generated code:', file=sys.stderr) print(src, file=sys.stderr) raise func = evaldict[name] if addsource: attrs['__source__'] = src self.update(func, **attrs) return func @classmethod def create(cls, obj, body, evaldict, defaults=None, doc=None, module=None, addsource=True, **attrs): """ Create a function from the strings name, signature and body. evaldict is the evaluation dictionary. If addsource is true an attribute __source__ is added to the result. The attributes attrs are added, if any. """ if isinstance(obj, str): # "name(signature)" name, rest = obj.strip().split('(', 1) signature = rest[:-1] #strip a right parens func = None else: # a function name = None signature = None func = obj self = cls(func, name, signature, defaults, doc, module) ibody = '\n'.join(' ' + line for line in body.splitlines()) return self.make('def %(name)s(%(signature)s):\n' + ibody, evaldict, addsource, **attrs) def decorator(caller, func=None): """ decorator(caller) converts a caller function into a decorator; decorator(caller, func) decorates a function using a caller. """ if func is not None: # returns a decorated function evaldict = func.__globals__.copy() evaldict['_call_'] = caller evaldict['_func_'] = func return FunctionMaker.create( func, "return _call_(_func_, %(shortsignature)s)", evaldict, undecorated=func, __wrapped__=func) else: # returns a decorator if isinstance(caller, partial): return partial(decorator, caller) # otherwise assume caller is a function first = inspect.getargspec(caller)[0][0] # first arg evaldict = caller.__globals__.copy() evaldict['_call_'] = caller evaldict['decorator'] = decorator return FunctionMaker.create( '%s(%s)' % (caller.__name__, first), 'return decorator(_call_, %s)' % first, evaldict, undecorated=caller, __wrapped__=caller, doc=caller.__doc__, module=caller.__module__) networkx-1.8.1/networkx/external/decorator/decorator2/0000775000175000017500000000000012177457361023030 5ustar aricaric00000000000000networkx-1.8.1/networkx/external/decorator/decorator2/__init__.py0000664000175000017500000000000212177456333025127 0ustar aricaric00000000000000# networkx-1.8.1/networkx/external/decorator/decorator2/_decorator2.py0000664000175000017500000002115212177456333025604 0ustar aricaric00000000000000########################## LICENCE ############################### ## ## Copyright (c) 2005-2011, Michele Simionato ## All rights reserved. ## ## Redistributions of source code must retain the above copyright ## notice, this list of conditions and the following disclaimer. ## Redistributions in bytecode form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in ## the documentation and/or other materials provided with the ## distribution. ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ## A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT ## HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, ## INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, ## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS ## OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR ## TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE ## USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH ## DAMAGE. """ Decorator module, see http://pypi.python.org/pypi/decorator for the documentation. """ __version__ = '3.3.2' __all__ = ["decorator", "FunctionMaker", "partial"] import sys, re, inspect try: from functools import partial except ImportError: # for Python version < 2.5 class partial(object): "A simple replacement of functools.partial" def __init__(self, func, *args, **kw): self.func = func self.args = args self.keywords = kw def __call__(self, *otherargs, **otherkw): kw = self.keywords.copy() kw.update(otherkw) return self.func(*(self.args + otherargs), **kw) if sys.version >= '3': from inspect import getfullargspec else: class getfullargspec(object): "A quick and dirty replacement for getfullargspec for Python 2.X" def __init__(self, f): self.args, self.varargs, self.varkw, self.defaults = \ inspect.getargspec(f) self.kwonlyargs = [] self.kwonlydefaults = None self.annotations = getattr(f, '__annotations__', {}) def __iter__(self): yield self.args yield self.varargs yield self.varkw yield self.defaults DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(') # basic functionality class FunctionMaker(object): """ An object with the ability to create functions with a given signature. It has attributes name, doc, module, signature, defaults, dict and methods update and make. 
""" def __init__(self, func=None, name=None, signature=None, defaults=None, doc=None, module=None, funcdict=None): self.shortsignature = signature if func: # func can be a class or a callable, but not an instance method self.name = func.__name__ if self.name == '': # small hack for lambda functions self.name = '_lambda_' self.doc = func.__doc__ self.module = func.__module__ if inspect.isfunction(func): argspec = getfullargspec(func) for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs', 'kwonlydefaults', 'annotations'): setattr(self, a, getattr(argspec, a)) for i, arg in enumerate(self.args): setattr(self, 'arg%d' % i, arg) self.signature = inspect.formatargspec( formatvalue=lambda val: "", *argspec)[1:-1] allargs = list(self.args) if self.varargs: allargs.append('*' + self.varargs) if self.varkw: allargs.append('**' + self.varkw) try: self.shortsignature = ', '.join(allargs) except TypeError: # exotic signature, valid only in Python 2.X self.shortsignature = self.signature self.dict = func.__dict__.copy() # func=None happens when decorating a caller if name: self.name = name if signature is not None: self.signature = signature if defaults: self.defaults = defaults if doc: self.doc = doc if module: self.module = module if funcdict: self.dict = funcdict # check existence required attributes assert hasattr(self, 'name') if not hasattr(self, 'signature'): raise TypeError('You are decorating a non function: %s' % func) def update(self, func, **kw): "Update the signature of func with the data in self" func.__name__ = self.name func.__doc__ = getattr(self, 'doc', None) func.__dict__ = getattr(self, 'dict', {}) func.func_defaults = getattr(self, 'defaults', ()) func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None) callermodule = sys._getframe(3).f_globals.get('__name__', '?') func.__module__ = getattr(self, 'module', callermodule) func.__dict__.update(kw) def make(self, src_templ, evaldict=None, addsource=False, **attrs): "Make a new function from a given template and update the signature" src = src_templ % vars(self) # expand name and signature evaldict = evaldict or {} mo = DEF.match(src) if mo is None: raise SyntaxError('not a valid function template\n%s' % src) name = mo.group(1) # extract the function name names = set([name] + [arg.strip(' *') for arg in self.shortsignature.split(',')]) for n in names: if n in ('_func_', '_call_'): raise NameError('%s is overridden in\n%s' % (n, src)) if not src.endswith('\n'): # add a newline just for safety src += '\n' # this is needed in old versions of Python try: code = compile(src, '', 'single') # print >> sys.stderr, 'Compiling %s' % src exec code in evaldict except: print >> sys.stderr, 'Error in generated code:' print >> sys.stderr, src raise func = evaldict[name] if addsource: attrs['__source__'] = src self.update(func, **attrs) return func @classmethod def create(cls, obj, body, evaldict, defaults=None, doc=None, module=None, addsource=True, **attrs): """ Create a function from the strings name, signature and body. evaldict is the evaluation dictionary. If addsource is true an attribute __source__ is added to the result. The attributes attrs are added, if any. 
""" if isinstance(obj, str): # "name(signature)" name, rest = obj.strip().split('(', 1) signature = rest[:-1] #strip a right parens func = None else: # a function name = None signature = None func = obj self = cls(func, name, signature, defaults, doc, module) ibody = '\n'.join(' ' + line for line in body.splitlines()) return self.make('def %(name)s(%(signature)s):\n' + ibody, evaldict, addsource, **attrs) def decorator(caller, func=None): """ decorator(caller) converts a caller function into a decorator; decorator(caller, func) decorates a function using a caller. """ if func is not None: # returns a decorated function evaldict = func.func_globals.copy() evaldict['_call_'] = caller evaldict['_func_'] = func return FunctionMaker.create( func, "return _call_(_func_, %(shortsignature)s)", evaldict, undecorated=func, __wrapped__=func) else: # returns a decorator if isinstance(caller, partial): return partial(decorator, caller) # otherwise assume caller is a function first = inspect.getargspec(caller)[0][0] # first arg evaldict = caller.func_globals.copy() evaldict['_call_'] = caller evaldict['decorator'] = decorator return FunctionMaker.create( '%s(%s)' % (caller.__name__, first), 'return decorator(_call_, %s)' % first, evaldict, undecorated=caller, __wrapped__=caller, doc=caller.__doc__, module=caller.__module__) networkx-1.8.1/networkx/testing/0000775000175000017500000000000012177457361016635 5ustar aricaric00000000000000networkx-1.8.1/networkx/testing/__init__.py0000664000175000017500000000004512177456333020743 0ustar aricaric00000000000000from networkx.testing.utils import * networkx-1.8.1/networkx/testing/tests/0000775000175000017500000000000012177457361017777 5ustar aricaric00000000000000networkx-1.8.1/networkx/testing/tests/test_utils.py0000664000175000017500000000576212177456333022560 0ustar aricaric00000000000000from nose.tools import * import networkx as nx from networkx.testing import * # thanks to numpy for this GenericTest class (numpy/testing/test_utils.py) class _GenericTest(object): def _test_equal(self, a, b): self._assert_func(a, b) def _test_not_equal(self, a, b): try: self._assert_func(a, b) passed = True except AssertionError: pass else: raise AssertionError("a and b are found equal but are not") class TestNodesEqual(_GenericTest): def setUp(self): self._assert_func = assert_nodes_equal def test_nodes_equal(self): a = [1,2,5,4] b = [4,5,1,2] self._test_equal(a,b) def test_nodes_not_equal(self): a = [1,2,5,4] b = [4,5,1,3] self._test_not_equal(a,b) def test_nodes_with_data_equal(self): G = nx.Graph() G.add_nodes_from([1,2,3],color='red') H = nx.Graph() H.add_nodes_from([1,2,3],color='red') self._test_equal(G.nodes(data=True), H.nodes(data=True)) def test_edges_with_data_not_equal(self): G = nx.Graph() G.add_nodes_from([1,2,3],color='red') H = nx.Graph() H.add_nodes_from([1,2,3],color='blue') self._test_not_equal(G.nodes(data=True), H.nodes(data=True)) class TestEdgesEqual(_GenericTest): def setUp(self): self._assert_func = assert_edges_equal def test_edges_equal(self): a = [(1,2),(5,4)] b = [(4,5),(1,2)] self._test_equal(a,b) def test_edges_not_equal(self): a = [(1,2),(5,4)] b = [(4,5),(1,3)] self._test_not_equal(a,b) def test_edges_with_data_equal(self): G = nx.MultiGraph() G.add_path([0,1,2],weight=1) H = nx.MultiGraph() H.add_path([0,1,2],weight=1) self._test_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) def test_edges_with_data_not_equal(self): G = nx.MultiGraph() G.add_path([0,1,2],weight=1) H = nx.MultiGraph() H.add_path([0,1,2],weight=2) 
self._test_not_equal(G.edges(data=True, keys=True), H.edges(data=True, keys=True)) class TestGraphsEqual(_GenericTest): def setUp(self): self._assert_func = assert_graphs_equal def test_graphs_equal(self): G = nx.path_graph(4) H = nx.Graph() H.add_path(range(4)) H.name='path_graph(4)' self._test_equal(G,H) def test_graphs_not_equal(self): G = nx.path_graph(4) H = nx.Graph() H.add_cycle(range(4)) self._test_not_equal(G,H) def test_graphs_not_equal2(self): G = nx.path_graph(4) H = nx.Graph() H.add_path(range(3)) H.name='path_graph(4)' self._test_not_equal(G,H) def test_graphs_not_equal3(self): G = nx.path_graph(4) H = nx.Graph() H.add_path(range(4)) H.name='path_graph(foo)' self._test_not_equal(G,H) networkx-1.8.1/networkx/testing/utils.py0000664000175000017500000000367112177456333020354 0ustar aricaric00000000000000import operator from nose.tools import * __all__ = ['assert_nodes_equal', 'assert_edges_equal','assert_graphs_equal'] def assert_nodes_equal(nlist1, nlist2): # Assumes lists are either nodes, or (node,datadict) tuples, # and also that nodes are orderable/sortable. try: l = len(nlist1[0]) n1 = sorted(nlist1,key=operator.itemgetter(0)) n2 = sorted(nlist2,key=operator.itemgetter(0)) assert_equal(len(n1),len(n2)) for a,b in zip(n1,n2): assert_equal(a,b) except TypeError: assert_equal(set(nlist1),set(nlist2)) return def assert_edges_equal(elist1, elist2): # Assumes lists with u,v nodes either as # edge tuples (u,v) # edge tuples with data dicts (u,v,d) # edge tuples with keys and data dicts (u,v,k, d) # and also that nodes are orderable/sortable. e1 = sorted(elist1,key=lambda x: sorted(x[0:2])) e2 = sorted(elist2,key=lambda x: sorted(x[0:2])) assert_equal(len(e1),len(e2)) if len(e1) == 0: return True if len(e1[0]) == 2: for a,b in zip(e1,e2): assert_equal(set(a[0:2]),set(b[0:2])) elif len(e1[0]) == 3: for a,b in zip(e1,e2): assert_equal(set(a[0:2]),set(b[0:2])) assert_equal(a[2],b[2]) elif len(e1[0]) == 4: for a,b in zip(e1,e2): assert_equal(set(a[0:2]),set(b[0:2])) assert_equal(a[2],b[2]) assert_equal(a[3],b[3]) def assert_graphs_equal(graph1, graph2): if graph1.is_multigraph(): edges1 = graph1.edges(data=True,keys=True) else: edges1 = graph1.edges(data=True) if graph2.is_multigraph(): edges2 = graph2.edges(data=True,keys=True) else: edges2 = graph2.edges(data=True) assert_nodes_equal(graph1.nodes(data=True), graph2.nodes(data=True)) assert_edges_equal(edges1, edges2) assert_equal(graph1.graph,graph2.graph) return networkx-1.8.1/networkx/release.py0000664000175000017500000002113712177456604017155 0ustar aricaric00000000000000"""Release data for NetworkX. When NetworkX is imported a number of steps are followed to determine the version information. 1) If the release is not a development release (dev=False), then version information is read from version.py, a file containing statically defined version information. This file should exist on every downloadable release of NetworkX since setup.py creates it during packaging/installation. However, version.py might not exist if one is running NetworkX from the mercurial repository. In the event that version.py does not exist, then no vcs information will be available. 2) If the release is a development release, then version information is read dynamically, when possible. If no dynamic information can be read, then an attempt is made to read the information from version.py. If version.py does not exist, then no vcs information will be available. 
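In everyday use only the resolved values matter; downstream code typically
just reads them back (illustrative; the exact strings depend on the build)::

    >>> import networkx
    >>> networkx.__version__                    # doctest: +SKIP
    '1.8.1'
    >>> networkx.release.version_info           # doctest: +SKIP
    ('networkx', '1', '8.1', None)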
Clarification: version.py is created only by setup.py When setup.py creates version.py, it does so before packaging/installation. So the created file is included in the source distribution. When a user downloads a tar.gz file and extracts the files, the files will not be in a live version control repository. So when the user runs setup.py to install NetworkX, we must make sure write_versionfile() does not overwrite the revision information contained in the version.py that was included in the tar.gz file. This is why write_versionfile() includes an early escape. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from __future__ import absolute_import import os import sys import time import datetime import subprocess basedir = os.path.abspath(os.path.split(__file__)[0]) def write_versionfile(): """Creates a static file containing version information.""" versionfile = os.path.join(basedir, 'version.py') text = '''""" Version information for NetworkX, created during installation. Do not add this file to the repository. """ import datetime version = %(version)r date = %(date)r # Was NetworkX built from a development version? If so, remember that the major # and minor versions reference the "target" (rather than "current") release. dev = %(dev)r # Format: (name, major, min, revision) version_info = %(version_info)r # Format: a 'datetime.datetime' instance date_info = %(date_info)r # Format: (vcs, vcs_tuple) vcs_info = %(vcs_info)r ''' # Try to update all information date, date_info, version, version_info, vcs_info = get_info(dynamic=True) def writefile(): fh = open(versionfile, 'w') subs = { 'dev' : dev, 'version': version, 'version_info': version_info, 'date': date, 'date_info': date_info, 'vcs_info': vcs_info } fh.write(text % subs) fh.close() if vcs_info[0] == 'mercurial': # Then, we want to update version.py. writefile() else: if os.path.isfile(versionfile): # This is *good*, and the most likely place users will be when # running setup.py. We do not want to overwrite version.py. # Grab the version so that setup can use it. sys.path.insert(0, basedir) from version import version del sys.path[0] else: # This is *bad*. It means the user might have a tarball that # does not include version.py. Let this error raise so we can # fix the tarball. ##raise Exception('version.py not found!') # We no longer require that prepared tarballs include a version.py # So we use the possibly trunctated value from get_info() # Then we write a new file. writefile() return version def get_revision(): """Returns revision and vcs information, dynamically obtained.""" vcs, revision, tag = None, None, None hgdir = os.path.join(basedir, '..', '.hg') gitdir = os.path.join(basedir, '..', '.git') if os.path.isdir(hgdir): vcs = 'mercurial' try: p = subprocess.Popen(['hg', 'id'], cwd=basedir, stdout=subprocess.PIPE) except OSError: # Could not run hg, even though this is a mercurial repository. pass else: stdout = p.communicate()[0] # Force strings instead of unicode. x = list(map(str, stdout.decode().strip().split())) if len(x) == 0: # Somehow stdout was empty. This can happen, for example, # if you're running in a terminal which has redirected stdout. # In this case, we do not use any revision/tag info. pass elif len(x) == 1: # We don't have 'tip' or anything similar...so no tag. revision = str(x[0]) else: revision = str(x[0]) tag = str(x[1]) elif os.path.isdir(gitdir): vcs = 'git' # For now, we are not bothering with revision and tag. 
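    # Package the answer as (vcs_name, (revision, tag)) so callers can unpack
    # it uniformly, whether or not a repository was detected.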
vcs_info = (vcs, (revision, tag)) return revision, vcs_info def get_info(dynamic=True): ## Date information date_info = datetime.datetime.now() date = time.asctime(date_info.timetuple()) revision, version, version_info, vcs_info = None, None, None, None import_failed = False dynamic_failed = False if dynamic: revision, vcs_info = get_revision() if revision is None: dynamic_failed = True if dynamic_failed or not dynamic: # This is where most final releases of NetworkX will be. # All info should come from version.py. If it does not exist, then # no vcs information will be provided. sys.path.insert(0, basedir) try: from version import date, date_info, version, version_info, vcs_info except ImportError: import_failed = True vcs_info = (None, (None, None)) else: revision = vcs_info[1][0] del sys.path[0] if import_failed or (dynamic and not dynamic_failed): # We are here if: # we failed to determine static versioning info, or # we successfully obtained dynamic revision info version = ''.join([str(major), '.', str(minor)]) if dev: version += '.dev_' + date_info.strftime("%Y%m%d%H%M%S") version_info = (name, major, minor, revision) return date, date_info, version, version_info, vcs_info ## Version information name = 'networkx' major = "1" minor = "8.1" ## Declare current release as a development release. ## Change to False before tagging a release; then change back. dev = False description = "Python package for creating and manipulating graphs and networks" long_description = \ """ NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. """ license = 'BSD' authors = {'Hagberg' : ('Aric Hagberg','hagberg@lanl.gov'), 'Schult' : ('Dan Schult','dschult@colgate.edu'), 'Swart' : ('Pieter Swart','swart@lanl.gov') } maintainer = "NetworkX Developers" maintainer_email = "networkx-discuss@googlegroups.com" url = 'http://networkx.lanl.gov/' download_url="http://networkx.lanl.gov/download/networkx" platforms = ['Linux','Mac OSX','Windows','Unix'] keywords = ['Networks', 'Graph Theory', 'Mathematics', 'network', 'graph', 'discrete mathematics', 'math'] classifiers = [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.1', 'Programming Language :: Python :: 3.2', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Scientific/Engineering :: Bio-Informatics', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Physics'] date, date_info, version, version_info, vcs_info = get_info() if __name__ == '__main__': # Write versionfile for nightly snapshots. 
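    # Running this module directly refreshes networkx/version.py via
    # write_versionfile(), which leaves an existing version.py alone unless a
    # mercurial checkout is detected.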
write_versionfile() networkx-1.8.1/networkx/tests/0000775000175000017500000000000012177457361016322 5ustar aricaric00000000000000networkx-1.8.1/networkx/tests/__init__.py0000664000175000017500000000000012177456333020417 0ustar aricaric00000000000000networkx-1.8.1/networkx/tests/test_exceptions.py0000664000175000017500000000144412177456333022115 0ustar aricaric00000000000000from nose.tools import raises import networkx as nx # smoke tests for exceptions @raises(nx.NetworkXException) def test_raises_networkx_exception(): raise nx.NetworkXException @raises(nx.NetworkXError) def test_raises_networkx_error(): raise nx.NetworkXError @raises(nx.NetworkXPointlessConcept) def test_raises_networkx_pointless_concept(): raise nx.NetworkXPointlessConcept @raises(nx.NetworkXAlgorithmError) def test_raises_networkx_algorithm_error(): raise nx.NetworkXAlgorithmError @raises(nx.NetworkXUnfeasible) def test_raises_networkx_unfeasible(): raise nx.NetworkXUnfeasible @raises(nx.NetworkXNoPath) def test_raises_networkx_no_path(): raise nx.NetworkXNoPath @raises(nx.NetworkXUnbounded) def test_raises_networkx_unbounded(): raise nx.NetworkXUnbounded networkx-1.8.1/networkx/tests/test_relabel.py0000664000175000017500000001452412177456333021345 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from networkx import * from networkx.convert import * from networkx.algorithms.operators import * from networkx.generators.classic import barbell_graph,cycle_graph from networkx.testing import * class TestRelabel(): def test_convert_node_labels_to_integers(self): # test that empty graph converts fine for all options G=empty_graph() H=convert_node_labels_to_integers(G,100) assert_equal(H.name, '(empty_graph(0))_with_int_labels') assert_equal(H.nodes(), []) assert_equal(H.edges(), []) for opt in ["default", "sorted", "increasing degree", "decreasing degree"]: G=empty_graph() H=convert_node_labels_to_integers(G,100, ordering=opt) assert_equal(H.name, '(empty_graph(0))_with_int_labels') assert_equal(H.nodes(), []) assert_equal(H.edges(), []) G=empty_graph() G.add_edges_from([('A','B'),('A','C'),('B','C'),('C','D')]) G.name="paw" H=convert_node_labels_to_integers(G) degH=H.degree().values() degG=G.degree().values() assert_equal(sorted(degH), sorted(degG)) H=convert_node_labels_to_integers(G,1000) degH=H.degree().values() degG=G.degree().values() assert_equal(sorted(degH), sorted(degG)) assert_equal(H.nodes(), [1000, 1001, 1002, 1003]) H=convert_node_labels_to_integers(G,ordering="increasing degree") degH=H.degree().values() degG=G.degree().values() assert_equal(sorted(degH), sorted(degG)) assert_equal(degree(H,0), 1) assert_equal(degree(H,1), 2) assert_equal(degree(H,2), 2) assert_equal(degree(H,3), 3) H=convert_node_labels_to_integers(G,ordering="decreasing degree") degH=H.degree().values() degG=G.degree().values() assert_equal(sorted(degH), sorted(degG)) assert_equal(degree(H,0), 3) assert_equal(degree(H,1), 2) assert_equal(degree(H,2), 2) assert_equal(degree(H,3), 1) H=convert_node_labels_to_integers(G,ordering="increasing degree", label_attribute='label') degH=H.degree().values() degG=G.degree().values() assert_equal(sorted(degH), sorted(degG)) assert_equal(degree(H,0), 1) assert_equal(degree(H,1), 2) assert_equal(degree(H,2), 2) assert_equal(degree(H,3), 3) # check mapping assert_equal(H.node[3]['label'],'C') assert_equal(H.node[0]['label'],'D') assert_true(H.node[1]['label']=='A' or H.node[2]['label']=='A') assert_true(H.node[1]['label']=='B' or H.node[2]['label']=='B') def 
test_convert_to_integers2(self): G=empty_graph() G.add_edges_from([('C','D'),('A','B'),('A','C'),('B','C')]) G.name="paw" H=convert_node_labels_to_integers(G,ordering="sorted") degH=H.degree().values() degG=G.degree().values() assert_equal(sorted(degH), sorted(degG)) H=convert_node_labels_to_integers(G,ordering="sorted", label_attribute='label') assert_equal(H.node[0]['label'],'A') assert_equal(H.node[1]['label'],'B') assert_equal(H.node[2]['label'],'C') assert_equal(H.node[3]['label'],'D') @raises(nx.NetworkXError) def test_convert_to_integers_raise(self): G = nx.Graph() H=convert_node_labels_to_integers(G,ordering="increasing age") def test_relabel_nodes_copy(self): G=empty_graph() G.add_edges_from([('A','B'),('A','C'),('B','C'),('C','D')]) mapping={'A':'aardvark','B':'bear','C':'cat','D':'dog'} H=relabel_nodes(G,mapping) assert_equal(sorted(H.nodes()), ['aardvark', 'bear', 'cat', 'dog']) def test_relabel_nodes_function(self): G=empty_graph() G.add_edges_from([('A','B'),('A','C'),('B','C'),('C','D')]) # function mapping no longer encouraged but works def mapping(n): return ord(n) H=relabel_nodes(G,mapping) assert_equal(sorted(H.nodes()), [65, 66, 67, 68]) def test_relabel_nodes_graph(self): G=Graph([('A','B'),('A','C'),('B','C'),('C','D')]) mapping={'A':'aardvark','B':'bear','C':'cat','D':'dog'} H=relabel_nodes(G,mapping) assert_equal(sorted(H.nodes()), ['aardvark', 'bear', 'cat', 'dog']) def test_relabel_nodes_digraph(self): G=DiGraph([('A','B'),('A','C'),('B','C'),('C','D')]) mapping={'A':'aardvark','B':'bear','C':'cat','D':'dog'} H=relabel_nodes(G,mapping,copy=False) assert_equal(sorted(H.nodes()), ['aardvark', 'bear', 'cat', 'dog']) def test_relabel_nodes_multigraph(self): G=MultiGraph([('a','b'),('a','b')]) mapping={'a':'aardvark','b':'bear'} G=relabel_nodes(G,mapping,copy=False) assert_equal(sorted(G.nodes()), ['aardvark', 'bear']) assert_edges_equal(sorted(G.edges()), [('aardvark', 'bear'), ('aardvark', 'bear')]) def test_relabel_nodes_multidigraph(self): G=MultiDiGraph([('a','b'),('a','b')]) mapping={'a':'aardvark','b':'bear'} G=relabel_nodes(G,mapping,copy=False) assert_equal(sorted(G.nodes()), ['aardvark', 'bear']) assert_equal(sorted(G.edges()), [('aardvark', 'bear'), ('aardvark', 'bear')]) @raises(KeyError) def test_relabel_nodes_missing(self): G=Graph([('A','B'),('A','C'),('B','C'),('C','D')]) mapping={0:'aardvark'} G=relabel_nodes(G,mapping,copy=False) def test_relabel_toposort(self): K5=nx.complete_graph(4) G=nx.complete_graph(4) G=nx.relabel_nodes(G,dict( [(i,i+1) for i in range(4)]),copy=False) nx.is_isomorphic(K5,G) G=nx.complete_graph(4) G=nx.relabel_nodes(G,dict( [(i,i-1) for i in range(4)]),copy=False) nx.is_isomorphic(K5,G) def test_relabel_selfloop(self): G = nx.DiGraph([(1, 1), (1, 2), (2, 3)]) G = nx.relabel_nodes(G, {1: 'One', 2: 'Two', 3: 'Three'}, copy=False) assert_equal(sorted(G.nodes()),['One','Three','Two']) G = nx.MultiDiGraph([(1, 1), (1, 2), (2, 3)]) G = nx.relabel_nodes(G, {1: 'One', 2: 'Two', 3: 'Three'}, copy=False) assert_equal(sorted(G.nodes()),['One','Three','Two']) networkx-1.8.1/networkx/tests/benchmark.py0000664000175000017500000002461412177456333020633 0ustar aricaric00000000000000from timeit import Timer # This is gratefully modeled after the benchmarks found in # the numpy svn repository. http://svn.scipy.org/svn/numpy/trunk class Benchmark(object): """ Benchmark a method or simple bit of code using different Graph classes. 
If the test code is the same for each graph class, then you can set it during instantiation through the argument test_string. The argument test_string can also be a tuple of test code and setup code. The code is entered as a string valid for use with the timeit module. Example: >>> b=Benchmark(['Graph','XGraph']) >>> b['Graph']=('G.add_nodes_from(nlist)','nlist=range(100)') >>> b.run() """ def __init__(self,graph_classes,title='',test_string=None,runs=3,reps=1000): self.runs = runs self.reps = reps self.title = title self.class_tests = dict((gc,'') for gc in graph_classes) # set up the test string if it is the same for all classes. if test_string is not None: if isinstance(test_string,tuple): self['all']=test_string else: self['all']=(test_string,'') def __setitem__(self,graph_class,some_strs): """ Set a simple bit of code and setup string for the test. Use this for cases where the code differs from one class to another. """ test_str, setup_str = some_strs if graph_class == 'all': graph_class = self.class_tests.keys() elif not isinstance(graph_class,list): graph_class = [graph_class] for GC in graph_class: setup_string='import networkx as NX\nG=NX.%s.%s()\n'%\ (GC.lower(),GC) + setup_str self.class_tests[GC] = Timer(test_str, setup_string) def run(self): """Run the benchmark for each class and print results.""" column_len = max(len(G) for G in self.class_tests) print('='*72) if self.title: print("%s: %s runs, %s reps"% (self.title,self.runs,self.reps)) print('='*72) times=[] for GC,timer in self.class_tests.items(): name = GC.ljust(column_len) try: t=sum(timer.repeat(self.runs,self.reps))/self.runs # print "%s: %s" % (name, timer.repeat(self.runs,self.reps)) times.append((t,name)) except Exception as e: print("%s: Failed to benchmark (%s)." % (name,e)) times.sort() tmin=times[0][0] for t,name in times: print("%s: %5.2f %s" % (name, t/tmin*100.,t)) print('-'*72) print() if __name__ == "__main__": # set up for all routines: classes=['Graph','MultiGraph','DiGraph','MultiDiGraph'] all_tests=['add_nodes','add_edges','remove_nodes','remove_edges',\ 'neighbors','edges','degree','dijkstra','shortest path',\ 'subgraph','edgedata_subgraph','laplacian'] # Choose which tests to run tests=all_tests tests=['subgraph','edgedata_subgraph'] #tests=all_tests[-1:] N=100 if 'add_nodes' in tests: title='Benchmark: Adding nodes' test_string=('G.add_nodes_from(nlist)','nlist=range(%i)'%N) b=Benchmark(classes,title,test_string,runs=3,reps=1000) b.run() if 'add_edges' in tests: title='Benchmark: Adding edges' setup='elist=[(i,i+3) for i in range(%s-3)]\nG.add_nodes_from(range(%i))'%(N,N) test_string=('G.add_edges_from(elist)',setup) b=Benchmark(classes,title,test_string,runs=3,reps=1000) b.run() if 'remove_nodes' in tests: title='Benchmark: Adding and Deleting nodes' setup='nlist=range(%i)'%N test_string=('G.add_nodes_from(nlist)\nG.remove_nodes_from(nlist)',setup) b=Benchmark(classes,title,test_string,runs=3,reps=1000) b.run() if 'remove_edges' in tests: title='Benchmark: Adding and Deleting edges' setup='elist=[(i,i+3) for i in range(%s-3)]'%N test_string=('G.add_edges_from(elist)\nG.remove_edges_from(elist)',setup) b=Benchmark(classes,title,test_string,runs=3,reps=1000) b.run() if 'neighbors' in tests: N=500 p=0.3 title='Benchmark: reporting neighbors' b=Benchmark(classes,title,runs=3,reps=1) test_string='for n in G:\n for nbr in G.neighbors(n):\n pass' all_setup='H=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v)\n' if 'Graph' in classes: 
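            # Plain undirected Graph uses the setup built above: one add_edge
            # call per sampled (u,v) pair from the binomial graph.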
b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() if 'edges' in tests: N=500 p=0.3 title='Benchmark: reporting edges' b=Benchmark(classes,title,runs=3,reps=1) test_string='for n in G:\n for e in G.edges(n):\n pass' all_setup='H=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v)\n' if 'Graph' in classes: b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() if 'degree' in tests: N=500 p=0.3 title='Benchmark: reporting degree' b=Benchmark(classes,title,runs=3,reps=1) test_string='for d in G.degree():\n pass' all_setup='H=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v)\n' if 'Graph' in classes: b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() if 'dijkstra' in tests: N=500 p=0.3 title='dijkstra single source shortest path' b=Benchmark(classes,title,runs=3,reps=1) test_string='p=NX.single_source_dijkstra(G,i)' all_setup='i=6\nH=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v)' if 'Graph' in classes: b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() if 'shortest path' in tests: N=500 p=0.3 title='single source shortest path' b=Benchmark(classes,title,runs=3,reps=1) test_string='p=NX.single_source_shortest_path(G,i)' all_setup='i=6\nH=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v)' if 'Graph' in classes: b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() if 'subgraph' in tests: N=500 p=0.3 title='subgraph method' b=Benchmark(classes,title,runs=3,reps=1) test_string='G.subgraph(nlist)' all_setup='nlist=range(100,150)\nH=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v)' if 'Graph' in classes: b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) 
setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() if 'edgedata_subgraph' in tests: N=500 p=0.3 title='subgraph method with edge data present' b=Benchmark(classes,title,runs=3,reps=1) test_string='G.subgraph(nlist)' all_setup='nlist=range(100,150)\nH=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v,hi=3)' if 'Graph' in classes: b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)],hi=2)' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v,hi=1)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)],hi=2)' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() if 'laplacian' in tests: N=500 p=0.3 title='creation of laplacian matrix' b=Benchmark(classes,title,runs=3,reps=1) test_string='NX.laplacian(G)' all_setup='H=NX.binomial_graph(%s,%s)\nfor (u,v) in H.edges_iter():\n '%(N,p) setup=all_setup+'G.add_edge(u,v)' if 'Graph' in classes: b['Graph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'DiGraph' in classes: b['DiGraph']=(test_string,setup) setup=all_setup+'G.add_edge(u,v)' if 'MultiGraph' in classes: b['MultiGraph']=(test_string,setup) setup=all_setup+'G.add_edges_from([(u,v),(v,u)])' if 'MultiDiGraph' in classes: b['MultiDiGraph']=(test_string,setup) b.run() networkx-1.8.1/networkx/tests/test_convert.py0000664000175000017500000002300012177456333021404 0ustar aricaric00000000000000#!/usr/bin/env python """Convert ======= """ from nose.tools import * from networkx import * from networkx.convert import * from networkx.algorithms.operators import * from networkx.generators.classic import barbell_graph,cycle_graph class TestConvert(): def edgelists_equal(self,e1,e2): return sorted(sorted(e) for e in e1)==sorted(sorted(e) for e in e2) def test_simple_graphs(self): for dest, source in [(to_dict_of_dicts, from_dict_of_dicts), (to_dict_of_lists, from_dict_of_lists)]: G=barbell_graph(10,3) dod=dest(G) # Dict of [dicts, lists] GG=source(dod) assert_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_equal(sorted(G.edges()), sorted(GG.edges())) GW=to_networkx_graph(dod) assert_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_equal(sorted(G.edges()), sorted(GW.edges())) GI=Graph(dod) assert_equal(sorted(G.nodes()), sorted(GI.nodes())) assert_equal(sorted(G.edges()), sorted(GI.edges())) # With nodelist keyword P4=path_graph(4) P3=path_graph(3) dod=dest(P4,nodelist=[0,1,2]) Gdod=Graph(dod) assert_equal(sorted(Gdod.nodes()), sorted(P3.nodes())) assert_equal(sorted(Gdod.edges()), sorted(P3.edges())) def test_digraphs(self): for dest, source in [(to_dict_of_dicts, from_dict_of_dicts), (to_dict_of_lists, from_dict_of_lists)]: G=cycle_graph(10) # Dict of [dicts, lists] dod=dest(G) GG=source(dod) assert_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_equal(sorted(G.edges()), sorted(GG.edges())) GW=to_networkx_graph(dod) assert_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_equal(sorted(G.edges()), sorted(GW.edges())) GI=Graph(dod) assert_equal(sorted(G.nodes()), sorted(GI.nodes())) assert_equal(sorted(G.edges()), sorted(GI.edges())) G=cycle_graph(10,create_using=DiGraph()) dod=dest(G) GG=source(dod, create_using=DiGraph()) assert_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_equal(sorted(G.edges()), sorted(GG.edges())) GW=to_networkx_graph(dod, create_using=DiGraph()) assert_equal(sorted(G.nodes()), 
sorted(GW.nodes())) assert_equal(sorted(G.edges()), sorted(GW.edges())) GI=DiGraph(dod) assert_equal(sorted(G.nodes()), sorted(GI.nodes())) assert_equal(sorted(G.edges()), sorted(GI.edges())) def test_graph(self): G=cycle_graph(10) e=G.edges() source=[u for u,v in e] dest=[v for u,v in e] ex=zip(source,dest,source) G=Graph() G.add_weighted_edges_from(ex) # Dict of dicts dod=to_dict_of_dicts(G) GG=from_dict_of_dicts(dod,create_using=Graph()) assert_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_equal(sorted(G.edges()), sorted(GG.edges())) GW=to_networkx_graph(dod,create_using=Graph()) assert_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_equal(sorted(G.edges()), sorted(GW.edges())) GI=Graph(dod) assert_equal(sorted(G.nodes()), sorted(GI.nodes())) assert_equal(sorted(G.edges()), sorted(GI.edges())) # Dict of lists dol=to_dict_of_lists(G) GG=from_dict_of_lists(dol,create_using=Graph()) # dict of lists throws away edge data so set it to none enone=[(u,v,{}) for (u,v,d) in G.edges(data=True)] assert_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_equal(enone, sorted(GG.edges(data=True))) GW=to_networkx_graph(dol,create_using=Graph()) assert_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_equal(enone, sorted(GW.edges(data=True))) GI=Graph(dol) assert_equal(sorted(G.nodes()), sorted(GI.nodes())) assert_equal(enone, sorted(GI.edges(data=True))) def test_with_multiedges_self_loops(self): G=cycle_graph(10) e=G.edges() source,dest = list(zip(*e)) ex=list(zip(source,dest,source)) XG=Graph() XG.add_weighted_edges_from(ex) XGM=MultiGraph() XGM.add_weighted_edges_from(ex) XGM.add_edge(0,1,weight=2) # multiedge XGS=Graph() XGS.add_weighted_edges_from(ex) XGS.add_edge(0,0,weight=100) # self loop # Dict of dicts # with self loops, OK dod=to_dict_of_dicts(XGS) GG=from_dict_of_dicts(dod,create_using=Graph()) assert_equal(sorted(XGS.nodes()), sorted(GG.nodes())) assert_equal(sorted(XGS.edges()), sorted(GG.edges())) GW=to_networkx_graph(dod,create_using=Graph()) assert_equal(sorted(XGS.nodes()), sorted(GW.nodes())) assert_equal(sorted(XGS.edges()), sorted(GW.edges())) GI=Graph(dod) assert_equal(sorted(XGS.nodes()), sorted(GI.nodes())) assert_equal(sorted(XGS.edges()), sorted(GI.edges())) # Dict of lists # with self loops, OK dol=to_dict_of_lists(XGS) GG=from_dict_of_lists(dol,create_using=Graph()) # dict of lists throws away edge data so set it to none enone=[(u,v,{}) for (u,v,d) in XGS.edges(data=True)] assert_equal(sorted(XGS.nodes()), sorted(GG.nodes())) assert_equal(enone, sorted(GG.edges(data=True))) GW=to_networkx_graph(dol,create_using=Graph()) assert_equal(sorted(XGS.nodes()), sorted(GW.nodes())) assert_equal(enone, sorted(GW.edges(data=True))) GI=Graph(dol) assert_equal(sorted(XGS.nodes()), sorted(GI.nodes())) assert_equal(enone, sorted(GI.edges(data=True))) # Dict of dicts # with multiedges, OK dod=to_dict_of_dicts(XGM) GG=from_dict_of_dicts(dod,create_using=MultiGraph(), multigraph_input=True) assert_equal(sorted(XGM.nodes()), sorted(GG.nodes())) assert_equal(sorted(XGM.edges()), sorted(GG.edges())) GW=to_networkx_graph(dod,create_using=MultiGraph(),multigraph_input=True) assert_equal(sorted(XGM.nodes()), sorted(GW.nodes())) assert_equal(sorted(XGM.edges()), sorted(GW.edges())) GI=MultiGraph(dod) # convert can't tell whether to duplicate edges! 
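# Note: to_dict_of_dicts(XGM) has the nested form {u: {v: {key: datadict}}}.
# Fed back without multigraph_input=True, the inner {key: datadict} mapping is
# indistinguishable from an ordinary edge-attribute dict, so the parallel edge
# in XGM is not reconstructed and the edge lists compared below differ.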
assert_equal(sorted(XGM.nodes()), sorted(GI.nodes())) #assert_not_equal(sorted(XGM.edges()), sorted(GI.edges())) assert_false(sorted(XGM.edges()) == sorted(GI.edges())) GE=from_dict_of_dicts(dod,create_using=MultiGraph(), multigraph_input=False) assert_equal(sorted(XGM.nodes()), sorted(GE.nodes())) assert_not_equal(sorted(XGM.edges()), sorted(GE.edges())) GI=MultiGraph(XGM) assert_equal(sorted(XGM.nodes()), sorted(GI.nodes())) assert_equal(sorted(XGM.edges()), sorted(GI.edges())) GM=MultiGraph(G) assert_equal(sorted(GM.nodes()), sorted(G.nodes())) assert_equal(sorted(GM.edges()), sorted(G.edges())) # Dict of lists # with multiedges, OK, but better write as DiGraph else you'll # get double edges dol=to_dict_of_lists(G) GG=from_dict_of_lists(dol,create_using=MultiGraph()) assert_equal(sorted(G.nodes()), sorted(GG.nodes())) assert_equal(sorted(G.edges()), sorted(GG.edges())) GW=to_networkx_graph(dol,create_using=MultiGraph()) assert_equal(sorted(G.nodes()), sorted(GW.nodes())) assert_equal(sorted(G.edges()), sorted(GW.edges())) GI=MultiGraph(dol) assert_equal(sorted(G.nodes()), sorted(GI.nodes())) assert_equal(sorted(G.edges()), sorted(GI.edges())) def test_edgelists(self): P=path_graph(4) e=[(0,1),(1,2),(2,3)] G=Graph(e) assert_equal(sorted(G.nodes()), sorted(P.nodes())) assert_equal(sorted(G.edges()), sorted(P.edges())) assert_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True))) e=[(0,1,{}),(1,2,{}),(2,3,{})] G=Graph(e) assert_equal(sorted(G.nodes()), sorted(P.nodes())) assert_equal(sorted(G.edges()), sorted(P.edges())) assert_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True))) e=((n,n+1) for n in range(3)) G=Graph(e) assert_equal(sorted(G.nodes()), sorted(P.nodes())) assert_equal(sorted(G.edges()), sorted(P.edges())) assert_equal(sorted(G.edges(data=True)), sorted(P.edges(data=True))) def test_directed_to_undirected(self): edges1 = [(0, 1), (1, 2), (2, 0)] edges2 = [(0, 1), (1, 2), (0, 2)] assert_true(self.edgelists_equal(nx.Graph(nx.DiGraph(edges1)).edges(),edges1)) assert_true(self.edgelists_equal(nx.Graph(nx.DiGraph(edges2)).edges(),edges1)) assert_true(self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges1)).edges(),edges1)) assert_true(self.edgelists_equal(nx.MultiGraph(nx.DiGraph(edges2)).edges(),edges1)) assert_true(self.edgelists_equal(nx.MultiGraph(nx.MultiDiGraph(edges1)).edges(), edges1)) assert_true(self.edgelists_equal(nx.MultiGraph(nx.MultiDiGraph(edges2)).edges(), edges1)) assert_true(self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges1)).edges(),edges1)) assert_true(self.edgelists_equal(nx.Graph(nx.MultiDiGraph(edges2)).edges(),edges1)) networkx-1.8.1/networkx/tests/test.py0000664000175000017500000000233412177456333017653 0ustar aricaric00000000000000#!/usr/bin/env python import sys from os import path,getcwd def run(verbosity=1,doctest=False,numpy=True): """Run NetworkX tests. Parameters ---------- verbosity: integer, optional Level of detail in test reports. Higher numbers provide more detail. 
doctest: bool, optional True to run doctests in code modules numpy: bool, optional True to test modules dependent on numpy """ try: import nose except ImportError: raise ImportError(\ "The nose package is needed to run the NetworkX tests.") sys.stderr.write("Running NetworkX tests:") nx_install_dir=path.join(path.dirname(__file__), path.pardir) # stop if running from source directory if getcwd() == path.abspath(path.join(nx_install_dir,path.pardir)): raise RuntimeError("Can't run tests from source directory.\n" "Run 'nosetests' from the command line.") argv=[' ','--verbosity=%d'%verbosity, '-w',nx_install_dir, '-exe'] if doctest: argv.extend(['--with-doctest','--doctest-extension=txt']) if not numpy: argv.extend(['-A not numpy']) nose.run(argv=argv) if __name__=="__main__": run() networkx-1.8.1/networkx/tests/test_convert_scipy.py0000664000175000017500000001457112177456333022630 0ustar aricaric00000000000000from nose import SkipTest from nose.tools import assert_raises, assert_true, assert_equal, raises import networkx as nx from networkx.generators.classic import barbell_graph,cycle_graph,path_graph class TestConvertNumpy(object): @classmethod def setupClass(cls): global np, sp, sparse, np_assert_equal try: import numpy as np import scipy as sp import scipy.sparse as sparse np_assert_equal=np.testing.assert_equal except ImportError: raise SkipTest('SciPy sparse library not available.') def __init__(self): self.G1 = barbell_graph(10, 3) self.G2 = cycle_graph(10, create_using=nx.DiGraph()) self.G3 = self.create_weighted(nx.Graph()) self.G4 = self.create_weighted(nx.DiGraph()) def create_weighted(self, G): g = cycle_graph(4) e = g.edges() source = [u for u,v in e] dest = [v for u,v in e] weight = [s+10 for s in source] ex = zip(source, dest, weight) G.add_weighted_edges_from(ex) return G def assert_equal(self, G1, G2): assert_true( sorted(G1.nodes())==sorted(G2.nodes()) ) assert_true( sorted(G1.edges())==sorted(G2.edges()) ) def identity_conversion(self, G, A, create_using): GG = nx.from_scipy_sparse_matrix(A, create_using=create_using) self.assert_equal(G, GG) GW = nx.to_networkx_graph(A, create_using=create_using) self.assert_equal(G, GW) GI = create_using.__class__(A) self.assert_equal(G, GI) ACSR = A.tocsr() GI = create_using.__class__(ACSR) self.assert_equal(G, GI) ACOO = A.tocoo() GI = create_using.__class__(ACOO) self.assert_equal(G, GI) ACSC = A.tocsc() GI = create_using.__class__(ACSC) self.assert_equal(G, GI) AD = A.todense() GI = create_using.__class__(AD) self.assert_equal(G, GI) AA = A.toarray() GI = create_using.__class__(AA) self.assert_equal(G, GI) def test_shape(self): "Conversion from non-square sparse array." A = sp.sparse.lil_matrix([[1,2,3],[4,5,6]]) assert_raises(nx.NetworkXError, nx.from_scipy_sparse_matrix, A) def test_identity_graph_matrix(self): "Conversion from graph to sparse matrix to graph." A = nx.to_scipy_sparse_matrix(self.G1) self.identity_conversion(self.G1, A, nx.Graph()) def test_identity_digraph_matrix(self): "Conversion from digraph to sparse matrix to digraph." 
A = nx.to_scipy_sparse_matrix(self.G2) self.identity_conversion(self.G2, A, nx.DiGraph()) def test_identity_weighted_graph_matrix(self): """Conversion from weighted graph to sparse matrix to weighted graph.""" A = nx.to_scipy_sparse_matrix(self.G3) self.identity_conversion(self.G3, A, nx.Graph()) def test_identity_weighted_digraph_matrix(self): """Conversion from weighted digraph to sparse matrix to weighted digraph.""" A = nx.to_scipy_sparse_matrix(self.G4) self.identity_conversion(self.G4, A, nx.DiGraph()) def test_nodelist(self): """Conversion from graph to sparse matrix to graph with nodelist.""" P4 = path_graph(4) P3 = path_graph(3) nodelist = P3.nodes() A = nx.to_scipy_sparse_matrix(P4, nodelist=nodelist) GA = nx.Graph(A) self.assert_equal(GA, P3) # Make nodelist ambiguous by containing duplicates. nodelist += [nodelist[0]] assert_raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nodelist) def test_weight_keyword(self): WP4 = nx.Graph() WP4.add_edges_from( (n,n+1,dict(weight=0.5,other=0.3)) for n in range(3) ) P4 = path_graph(4) A = nx.to_scipy_sparse_matrix(P4) np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) np_assert_equal(0.5*A.todense(), nx.to_scipy_sparse_matrix(WP4).todense()) np_assert_equal(0.3*A.todense(), nx.to_scipy_sparse_matrix(WP4,weight='other').todense()) def test_format_keyword(self): WP4 = nx.Graph() WP4.add_edges_from( (n,n+1,dict(weight=0.5,other=0.3)) for n in range(3) ) P4 = path_graph(4) A = nx.to_scipy_sparse_matrix(P4, format='csr') np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) A = nx.to_scipy_sparse_matrix(P4, format='csc') np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) A = nx.to_scipy_sparse_matrix(P4, format='coo') np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) A = nx.to_scipy_sparse_matrix(P4, format='bsr') np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) A = nx.to_scipy_sparse_matrix(P4, format='lil') np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) A = nx.to_scipy_sparse_matrix(P4, format='dia') np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) A = nx.to_scipy_sparse_matrix(P4, format='dok') np_assert_equal(A.todense(), nx.to_scipy_sparse_matrix(WP4,weight=None).todense()) @raises(nx.NetworkXError) def test_format_keyword_fail(self): WP4 = nx.Graph() WP4.add_edges_from( (n,n+1,dict(weight=0.5,other=0.3)) for n in range(3) ) P4 = path_graph(4) nx.to_scipy_sparse_matrix(P4, format='any_other') @raises(nx.NetworkXError) def test_null_fail(self): nx.to_scipy_sparse_matrix(nx.Graph()) def test_empty(self): G = nx.Graph() G.add_node(1) M = nx.to_scipy_sparse_matrix(G) np_assert_equal(M.todense(), np.matrix([[0]])) def test_ordering(self): G = nx.DiGraph() G.add_edge(1,2) G.add_edge(2,3) G.add_edge(3,1) M = nx.to_scipy_sparse_matrix(G,nodelist=[3,2,1]) np_assert_equal(M.todense(), np.matrix([[0,0,1],[1,0,0],[0,1,0]])) networkx-1.8.1/networkx/tests/test_convert_numpy.py0000664000175000017500000001425012177456333022643 0ustar aricaric00000000000000from nose import SkipTest from nose.tools import assert_raises, assert_true, assert_equal import networkx as nx from networkx.generators.classic import barbell_graph,cycle_graph,path_graph class TestConvertNumpy(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np global np_assert_equal try: import 
numpy as np np_assert_equal=np.testing.assert_equal except ImportError: raise SkipTest('NumPy not available.') def __init__(self): self.G1 = barbell_graph(10, 3) self.G2 = cycle_graph(10, create_using=nx.DiGraph()) self.G3 = self.create_weighted(nx.Graph()) self.G4 = self.create_weighted(nx.DiGraph()) def create_weighted(self, G): g = cycle_graph(4) e = g.edges() source = [u for u,v in e] dest = [v for u,v in e] weight = [s+10 for s in source] ex = zip(source, dest, weight) G.add_weighted_edges_from(ex) return G def assert_equal(self, G1, G2): assert_true( sorted(G1.nodes())==sorted(G2.nodes()) ) assert_true( sorted(G1.edges())==sorted(G2.edges()) ) def identity_conversion(self, G, A, create_using): GG = nx.from_numpy_matrix(A, create_using=create_using) self.assert_equal(G, GG) GW = nx.to_networkx_graph(A, create_using=create_using) self.assert_equal(G, GW) GI = create_using.__class__(A) self.assert_equal(G, GI) def test_shape(self): "Conversion from non-square array." A=np.array([[1,2,3],[4,5,6]]) assert_raises(nx.NetworkXError, nx.from_numpy_matrix, A) def test_identity_graph_matrix(self): "Conversion from graph to matrix to graph." A = nx.to_numpy_matrix(self.G1) self.identity_conversion(self.G1, A, nx.Graph()) def test_identity_graph_array(self): "Conversion from graph to array to graph." A = nx.to_numpy_matrix(self.G1) A = np.asarray(A) self.identity_conversion(self.G1, A, nx.Graph()) def test_identity_digraph_matrix(self): """Conversion from digraph to matrix to digraph.""" A = nx.to_numpy_matrix(self.G2) self.identity_conversion(self.G2, A, nx.DiGraph()) def test_identity_digraph_array(self): """Conversion from digraph to array to digraph.""" A = nx.to_numpy_matrix(self.G2) A = np.asarray(A) self.identity_conversion(self.G2, A, nx.DiGraph()) def test_identity_weighted_graph_matrix(self): """Conversion from weighted graph to matrix to weighted graph.""" A = nx.to_numpy_matrix(self.G3) self.identity_conversion(self.G3, A, nx.Graph()) def test_identity_weighted_graph_array(self): """Conversion from weighted graph to array to weighted graph.""" A = nx.to_numpy_matrix(self.G3) A = np.asarray(A) self.identity_conversion(self.G3, A, nx.Graph()) def test_identity_weighted_digraph_matrix(self): """Conversion from weighted digraph to matrix to weighted digraph.""" A = nx.to_numpy_matrix(self.G4) self.identity_conversion(self.G4, A, nx.DiGraph()) def test_identity_weighted_digraph_array(self): """Conversion from weighted digraph to array to weighted digraph.""" A = nx.to_numpy_matrix(self.G4) A = np.asarray(A) self.identity_conversion(self.G4, A, nx.DiGraph()) def test_nodelist(self): """Conversion from graph to matrix to graph with nodelist.""" P4 = path_graph(4) P3 = path_graph(3) nodelist = P3.nodes() A = nx.to_numpy_matrix(P4, nodelist=nodelist) GA = nx.Graph(A) self.assert_equal(GA, P3) # Make nodelist ambiguous by containing duplicates. 
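# A duplicated entry makes the node-to-row/column mapping ambiguous, so
# to_numpy_matrix is expected to raise NetworkXError instead of guessing.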
nodelist += [nodelist[0]] assert_raises(nx.NetworkXError, nx.to_numpy_matrix, P3, nodelist=nodelist) def test_weight_keyword(self): WP4 = nx.Graph() WP4.add_edges_from( (n,n+1,dict(weight=0.5,other=0.3)) for n in range(3) ) P4 = path_graph(4) A = nx.to_numpy_matrix(P4) np_assert_equal(A, nx.to_numpy_matrix(WP4,weight=None)) np_assert_equal(0.5*A, nx.to_numpy_matrix(WP4)) np_assert_equal(0.3*A, nx.to_numpy_matrix(WP4,weight='other')) def test_from_numpy_matrix_type(self): A=np.matrix([[1]]) G=nx.from_numpy_matrix(A) assert_equal(type(G[0][0]['weight']),int) A=np.matrix([[1]]).astype(np.float) G=nx.from_numpy_matrix(A) assert_equal(type(G[0][0]['weight']),float) A=np.matrix([[1]]).astype(np.str) G=nx.from_numpy_matrix(A) assert_equal(type(G[0][0]['weight']),str) A=np.matrix([[1]]).astype(np.bool) G=nx.from_numpy_matrix(A) assert_equal(type(G[0][0]['weight']),bool) A=np.matrix([[1]]).astype(np.complex) G=nx.from_numpy_matrix(A) assert_equal(type(G[0][0]['weight']),complex) A=np.matrix([[1]]).astype(np.object) assert_raises(TypeError,nx.from_numpy_matrix,A) def test_from_numpy_matrix_dtype(self): dt=[('weight',float),('cost',int)] A=np.matrix([[(1.0,2)]],dtype=dt) G=nx.from_numpy_matrix(A) assert_equal(type(G[0][0]['weight']),float) assert_equal(type(G[0][0]['cost']),int) assert_equal(G[0][0]['cost'],2) assert_equal(G[0][0]['weight'],1.0) def test_to_numpy_recarray(self): G=nx.Graph() G.add_edge(1,2,weight=7.0,cost=5) A=nx.to_numpy_recarray(G,dtype=[('weight',float),('cost',int)]) assert_equal(sorted(A.dtype.names),['cost','weight']) assert_equal(A.weight[0,1],7.0) assert_equal(A.weight[0,0],0.0) assert_equal(A.cost[0,1],5) assert_equal(A.cost[0,0],0) def test_numpy_multigraph(self): G=nx.MultiGraph() G.add_edge(1,2,weight=7) G.add_edge(1,2,weight=70) A=nx.to_numpy_matrix(G) assert_equal(A[1,0],77) A=nx.to_numpy_matrix(G,multigraph_weight=min) assert_equal(A[1,0],7) A=nx.to_numpy_matrix(G,multigraph_weight=max) assert_equal(A[1,0],70) networkx-1.8.1/networkx/classes/0000775000175000017500000000000012177457361016615 5ustar aricaric00000000000000networkx-1.8.1/networkx/classes/multigraph.py0000664000175000017500000007454612177456333021361 0ustar aricaric00000000000000"""Base class for MultiGraph.""" # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from copy import deepcopy import networkx as nx from networkx.classes.graph import Graph from networkx import NetworkXError __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) class MultiGraph(Graph): """ An undirected graph class that can store multiedges. Multiedges are multiple edges between two nodes. Each edge can hold optional data or attributes. A MultiGraph holds undirected edges. Self loops are allowed. Nodes can be arbitrary (hashable) Python objects with optional key/value attributes. Edges are represented as links between nodes with optional key/value attributes. Parameters ---------- data : input graph Data to initialize graph. If data=None (default) an empty graph is created. The data can be an edge list, or any NetworkX graph object. If the corresponding optional Python packages are installed the data can also be a NumPy matrix or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to graph as key=value pairs. 
See Also -------- Graph DiGraph MultiDiGraph Examples -------- Create an empty graph structure (a "null graph") with no nodes and no edges. >>> G = nx.MultiGraph() G can be grown in several ways. **Nodes:** Add one node at a time: >>> G.add_node(1) Add the nodes from any container (a list, dict, set or even the lines from a file or the nodes from another graph). >>> G.add_nodes_from([2,3]) >>> G.add_nodes_from(range(100,110)) >>> H=nx.Graph() >>> H.add_path([0,1,2,3,4,5,6,7,8,9]) >>> G.add_nodes_from(H) In addition to strings and integers any hashable Python object (except None) can represent a node, e.g. a customized node object, or even another Graph. >>> G.add_node(H) **Edges:** G can also be grown by adding edges. Add one edge, >>> G.add_edge(1, 2) a list of edges, >>> G.add_edges_from([(1,2),(1,3)]) or a collection of edges, >>> G.add_edges_from(H.edges()) If some edges connect nodes not yet in the graph, the nodes are added automatically. If an edge already exists, an additional edge is created and stored using a key to identify the edge. By default the key is the lowest unused integer. >>> G.add_edges_from([(4,5,dict(route=282)), (4,5,dict(route=37))]) >>> G[4] {3: {0: {}}, 5: {0: {}, 1: {'route': 282}, 2: {'route': 37}}} **Attributes:** Each graph, node, and edge can hold key/value attribute pairs in an associated attribute dictionary (the keys must be hashable). By default these are empty, but can be added or changed using add_edge, add_node or direct manipulation of the attribute dictionaries named graph, node and edge respectively. >>> G = nx.MultiGraph(day="Friday") >>> G.graph {'day': 'Friday'} Add node attributes using add_node(), add_nodes_from() or G.node >>> G.add_node(1, time='5pm') >>> G.add_nodes_from([3], time='2pm') >>> G.node[1] {'time': '5pm'} >>> G.node[1]['room'] = 714 >>> del G.node[1]['room'] # remove attribute >>> G.nodes(data=True) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Warning: adding a node to G.node does not add it to the graph. Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edge. >>> G.add_edge(1, 2, weight=4.7 ) >>> G.add_edges_from([(3,4),(4,5)], color='red') >>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) >>> G[1][2][0]['weight'] = 4.7 >>> G.edge[1][2][0]['weight'] = 4 **Shortcuts:** Many common graph features allow python syntax to speed reporting. >>> 1 in G # check if node in graph True >>> [n for n in G if n<3] # iterate through nodes [1, 2] >>> len(G) # number of nodes in graph 5 >>> G[1] # adjacency dict keyed by neighbor to edge attributes ... # Note: you should not change this dict manually! {2: {0: {'weight': 4}, 1: {'color': 'blue'}}} The fastest way to traverse all edges of a graph is via adjacency_iter(), but the edges() method is often more convenient. >>> for n,nbrsdict in G.adjacency_iter(): ... for nbr,keydict in nbrsdict.items(): ... for key,eattr in keydict.items(): ... if 'weight' in eattr: ... (n,nbr,eattr['weight']) (1, 2, 4) (2, 1, 4) (2, 3, 8) (3, 2, 8) >>> [ (u,v,edata['weight']) for u,v,edata in G.edges(data=True) if 'weight' in edata ] [(1, 2, 4), (2, 3, 8)] **Reporting:** Simple graph information is obtained using methods. Iterator versions of many reporting methods exist for efficiency. Methods exist for reporting nodes(), edges(), neighbors() and degree() as well as the number of nodes and edges. For details on these and other miscellaneous methods, see below. """ def add_edge(self, u, v, key=None, attr_dict=None, **attr): """Add an edge between u and v. 
The nodes u and v will be automatically added if they are not already in the graph. Edge attributes can be specified with keywords or by providing a dictionary with key/value pairs. See examples below. Parameters ---------- u,v : nodes Nodes can be, for example, strings or numbers. Nodes must be hashable (and not None) Python objects. key : hashable identifier, optional (default=lowest unused integer) Used to distinguish multiedges between a pair of nodes. attr_dict : dictionary, optional (default= no attributes) Dictionary of edge attributes. Key/value pairs will update existing data associated with the edge. attr : keyword arguments, optional Edge data (or labels or objects) can be assigned using keyword arguments. See Also -------- add_edges_from : add a collection of edges Notes ----- To replace/update edge data, use the optional key argument to identify a unique edge. Otherwise a new edge will be created. NetworkX algorithms designed for weighted graphs cannot use multigraphs directly because it is not clear how to handle multiedge weights. Convert to Graph using edge attribute 'weight' to enable weighted graph algorithms. Examples -------- The following all add the edge e=(1,2) to graph G: >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> e = (1,2) >>> G.add_edge(1, 2) # explicit two-node form >>> G.add_edge(*e) # single edge as tuple of two nodes >>> G.add_edges_from( [(1,2)] ) # add edges from iterable container Associate data to edges using keywords: >>> G.add_edge(1, 2, weight=3) >>> G.add_edge(1, 2, key=0, weight=4) # update data for key=0 >>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7) """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") # add nodes if u not in self.adj: self.adj[u] = {} self.node[u] = {} if v not in self.adj: self.adj[v] = {} self.node[v] = {} if v in self.adj[u]: keydict=self.adj[u][v] if key is None: # find a unique integer key # other methods might be better here? key=len(keydict) while key in keydict: key+=1 datadict=keydict.get(key,{}) datadict.update(attr_dict) keydict[key]=datadict else: # selfloops work this way without special treatment if key is None: key=0 datadict={} datadict.update(attr_dict) keydict={key:datadict} self.adj[u][v] = keydict self.adj[v][u] = keydict def add_edges_from(self, ebunch, attr_dict=None, **attr): """Add all the edges in ebunch. Parameters ---------- ebunch : container of edges Each edge given in the container will be added to the graph. The edges can be: - 2-tuples (u,v) or - 3-tuples (u,v,d) for an edge attribute dict d, or - 4-tuples (u,v,k,d) for an edge identified by key k attr_dict : dictionary, optional (default= no attributes) Dictionary of edge attributes. Key/value pairs will update existing data associated with each edge. attr : keyword arguments, optional Edge data (or labels or objects) can be assigned using keyword arguments. See Also -------- add_edge : add a single edge add_weighted_edges_from : convenient way to add weighted edges Notes ----- Adding the same edge twice has no effect but any edge data will be updated when each duplicate edge is added. 
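For a multigraph, however, each duplicate 2-tuple or 3-tuple stores an
additional parallel edge under the next unused integer key; supply a
4-tuple (u,v,key,d) with an existing key to update that edge instead.
A small illustration with the default integer keys:

>>> G = nx.MultiGraph()
>>> G.add_edges_from([(0,1),(0,1)])
>>> G.edges(keys=True)
[(0, 1, 0), (0, 1, 1)]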
Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edges_from([(0,1),(1,2)]) # using a list of edge tuples >>> e = zip(range(0,3),range(1,4)) >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 Associate data to edges >>> G.add_edges_from([(1,2),(2,3)], weight=3) >>> G.add_edges_from([(3,4),(1,4)], label='WN2898') """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") # process ebunch for e in ebunch: ne=len(e) if ne==4: u,v,key,dd = e elif ne==3: u,v,dd = e key=None elif ne==2: u,v = e dd = {} key=None else: raise NetworkXError(\ "Edge tuple %s must be a 2-tuple, 3-tuple or 4-tuple."%(e,)) if u in self.adj: keydict=self.adj[u].get(v,{}) else: keydict={} if key is None: # find a unique integer key # other methods might be better here? key=len(keydict) while key in keydict: key+=1 datadict=keydict.get(key,{}) datadict.update(attr_dict) datadict.update(dd) self.add_edge(u,v,key=key,attr_dict=datadict) def remove_edge(self, u, v, key=None): """Remove an edge between u and v. Parameters ---------- u,v: nodes Remove an edge between nodes u and v. key : hashable identifier, optional (default=None) Used to distinguish multiple edges between a pair of nodes. If None remove a single (abritrary) edge between u and v. Raises ------ NetworkXError If there is not an edge between u and v, or if there is no edge with the specified key. See Also -------- remove_edges_from : remove a collection of edges Examples -------- >>> G = nx.MultiGraph() >>> G.add_path([0,1,2,3]) >>> G.remove_edge(0,1) >>> e = (1,2) >>> G.remove_edge(*e) # unpacks e from an edge tuple For multiple edges >>> G = nx.MultiGraph() # or MultiDiGraph, etc >>> G.add_edges_from([(1,2),(1,2),(1,2)]) >>> G.remove_edge(1,2) # remove a single (arbitrary) edge For edges with keys >>> G = nx.MultiGraph() # or MultiDiGraph, etc >>> G.add_edge(1,2,key='first') >>> G.add_edge(1,2,key='second') >>> G.remove_edge(1,2,key='second') """ try: d=self.adj[u][v] except (KeyError): raise NetworkXError( "The edge %s-%s is not in the graph."%(u,v)) # remove the edge with specified data if key is None: d.popitem() else: try: del d[key] except (KeyError): raise NetworkXError( "The edge %s-%s with key %s is not in the graph."%(u,v,key)) if len(d)==0: # remove the key entries if last edge del self.adj[u][v] if u!=v: # check for selfloop del self.adj[v][u] def remove_edges_from(self, ebunch): """Remove all edges specified in ebunch. Parameters ---------- ebunch: list or container of edge tuples Each edge given in the list or container will be removed from the graph. The edges can be: - 2-tuples (u,v) All edges between u and v are removed. - 3-tuples (u,v,key) The edge identified by key is removed. - 4-tuples (u,v,key,data) where data is ignored. See Also -------- remove_edge : remove a single edge Notes ----- Will fail silently if an edge in ebunch is not in the graph. 
Examples -------- >>> G = nx.MultiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> ebunch=[(1,2),(2,3)] >>> G.remove_edges_from(ebunch) Removing multiple copies of edges >>> G = nx.MultiGraph() >>> G.add_edges_from([(1,2),(1,2),(1,2)]) >>> G.remove_edges_from([(1,2),(1,2)]) >>> G.edges() [(1, 2)] >>> G.remove_edges_from([(1,2),(1,2)]) # silently ignore extra copy >>> G.edges() # now empty graph [] """ for e in ebunch: try: self.remove_edge(*e[:3]) except NetworkXError: pass def has_edge(self, u, v, key=None): """Return True if the graph has an edge between nodes u and v. Parameters ---------- u,v : nodes Nodes can be, for example, strings or numbers. key : hashable identifier, optional (default=None) If specified return True only if the edge with key is found. Returns ------- edge_ind : bool True if edge is in the graph, False otherwise. Examples -------- Can be called either using two nodes u,v, an edge tuple (u,v), or an edge tuple (u,v,key). >>> G = nx.MultiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> G.has_edge(0,1) # using two nodes True >>> e = (0,1) >>> G.has_edge(*e) # e is a 2-tuple (u,v) True >>> G.add_edge(0,1,key='a') >>> G.has_edge(0,1,key='a') # specify key True >>> e=(0,1,'a') >>> G.has_edge(*e) # e is a 3-tuple (u,v,'a') True The following syntax are equivalent: >>> G.has_edge(0,1) True >>> 1 in G[0] # though this gives KeyError if 0 not in G True """ try: if key is None: return v in self.adj[u] else: return key in self.adj[u][v] except KeyError: return False def edges(self, nbunch=None, data=False, keys=False): """Return a list of edges. Edges are returned as tuples with optional data and keys in the order (node, neighbor, key, data). Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) Return two tuples (u,v) (False) or three-tuples (u,v,data) (True). keys : bool, optional (default=False) Return two tuples (u,v) (False) or three-tuples (u,v,key) (True). Returns -------- edge_list: list of edge tuples Edges that are adjacent to any node in nbunch, or a list of all edges if nbunch is not specified. See Also -------- edges_iter : return an iterator over the edges Notes ----- Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs this returns the out-edges. Examples -------- >>> G = nx.MultiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> G.edges() [(0, 1), (1, 2), (2, 3)] >>> G.edges(data=True) # default edge data is {} (empty dictionary) [(0, 1, {}), (1, 2, {}), (2, 3, {})] >>> G.edges(keys=True) # default keys are integers [(0, 1, 0), (1, 2, 0), (2, 3, 0)] >>> G.edges(data=True,keys=True) # default keys are integers [(0, 1, 0, {}), (1, 2, 0, {}), (2, 3, 0, {})] >>> G.edges([0,3]) [(0, 1), (3, 2)] >>> G.edges(0) [(0, 1)] """ return list(self.edges_iter(nbunch, data=data,keys=keys)) def edges_iter(self, nbunch=None, data=False, keys=False): """Return an iterator over the edges. Edges are returned as tuples with optional data and keys in the order (node, neighbor, key, data). Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict with each edge. keys : bool, optional (default=False) If True, return edge keys with each edge. Returns ------- edge_iter : iterator An iterator of (u,v), (u,v,d) or (u,v,key,d) tuples of edges. 
See Also -------- edges : return a list of edges Notes ----- Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs this returns the out-edges. Examples -------- >>> G = nx.MultiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> [e for e in G.edges_iter()] [(0, 1), (1, 2), (2, 3)] >>> list(G.edges_iter(data=True)) # default data is {} (empty dict) [(0, 1, {}), (1, 2, {}), (2, 3, {})] >>> list(G.edges(keys=True)) # default keys are integers [(0, 1, 0), (1, 2, 0), (2, 3, 0)] >>> list(G.edges(data=True,keys=True)) # default keys are integers [(0, 1, 0, {}), (1, 2, 0, {}), (2, 3, 0, {})] >>> list(G.edges_iter([0,3])) [(0, 1), (3, 2)] >>> list(G.edges_iter(0)) [(0, 1)] """ seen={} # helper dict to keep track of multiply stored edges if nbunch is None: nodes_nbrs = self.adj.items() else: nodes_nbrs=((n,self.adj[n]) for n in self.nbunch_iter(nbunch)) if data: for n,nbrs in nodes_nbrs: for nbr,keydict in nbrs.items(): if nbr not in seen: for key,data in keydict.items(): if keys: yield (n,nbr,key,data) else: yield (n,nbr,data) seen[n]=1 else: for n,nbrs in nodes_nbrs: for nbr,keydict in nbrs.items(): if nbr not in seen: for key,data in keydict.items(): if keys: yield (n,nbr,key) else: yield (n,nbr) seen[n] = 1 del seen def get_edge_data(self, u, v, key=None, default=None): """Return the attribute dictionary associated with edge (u,v). Parameters ---------- u,v : nodes default: any Python object (default=None) Value to return if the edge (u,v) is not found. key : hashable identifier, optional (default=None) Return data only for the edge with specified key. Returns ------- edge_dict : dictionary The edge attribute dictionary. Notes ----- It is faster to use G[u][v][key]. >>> G = nx.MultiGraph() # or MultiDiGraph >>> G.add_edge(0,1,key='a',weight=7) >>> G[0][1]['a'] # key='a' {'weight': 7} Warning: Assigning G[u][v][key] corrupts the graph data structure. But it is safe to assign attributes to that dictionary, >>> G[0][1]['a']['weight'] = 10 >>> G[0][1]['a']['weight'] 10 >>> G[1][0]['a']['weight'] 10 Examples -------- >>> G = nx.MultiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> G.get_edge_data(0,1) {0: {}} >>> e = (0,1) >>> G.get_edge_data(*e) # tuple form {0: {}} >>> G.get_edge_data('a','b',default=0) # edge not in graph, return 0 0 """ try: if key is None: return self.adj[u][v] else: return self.adj[u][v][key] except KeyError: return default def degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, degree). The node degree is the number of edges adjacent to the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, degree). 
See Also -------- degree Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> list(G.degree_iter(0)) # node 0 with degree 1 [(0, 1)] >>> list(G.degree_iter([0,1])) [(0, 1), (1, 2)] """ if nbunch is None: nodes_nbrs = self.adj.items() else: nodes_nbrs=((n,self.adj[n]) for n in self.nbunch_iter(nbunch)) if weight is None: for n,nbrs in nodes_nbrs: deg = sum([len(data) for data in nbrs.values()]) yield (n, deg+(n in nbrs and len(nbrs[n]))) else: # edge weighted graph - degree is sum of nbr edge weights for n,nbrs in nodes_nbrs: deg = sum([d.get(weight,1) for data in nbrs.values() for d in data.values()]) if n in nbrs: deg += sum([d.get(weight,1) for key,d in nbrs[n].items()]) yield (n, deg) def is_multigraph(self): """Return True if graph is a multigraph, False otherwise.""" return True def is_directed(self): """Return True if graph is directed, False otherwise.""" return False def to_directed(self): """Return a directed representation of the graph. Returns ------- G : MultiDiGraph A directed graph with the same name, same nodes, and with each edge (u,v,data) replaced by two directed edges (u,v,data) and (v,u,data). Notes ----- This returns a "deepcopy" of the edge, node, and graph attributes which attempts to completely copy all of the data and references. This is in contrast to the similar D=DiGraph(G) which returns a shallow copy of the data. See the Python copy module for more information on shallow and deep copies, http://docs.python.org/library/copy.html. Examples -------- >>> G = nx.Graph() # or MultiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1), (1, 0)] If already directed, return a (deep) copy >>> G = nx.DiGraph() # or MultiDiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1)] """ from networkx.classes.multidigraph import MultiDiGraph G=MultiDiGraph() G.add_nodes_from(self) G.add_edges_from( (u,v,key,deepcopy(datadict)) for u,nbrs in self.adjacency_iter() for v,keydict in nbrs.items() for key,datadict in keydict.items() ) G.graph=deepcopy(self.graph) G.node=deepcopy(self.node) return G def selfloop_edges(self, data=False, keys=False): """Return a list of selfloop edges. A selfloop edge has the same node at both ends. Parameters ----------- data : bool, optional (default=False) Return selfloop edges as two tuples (u,v) (data=False) or three-tuples (u,v,data) (data=True) keys : bool, optional (default=False) If True, return edge keys with each edge. Returns ------- edgelist : list of edge tuples A list of all selfloop edges. See Also -------- nodes_with_selfloops, number_of_selfloops Examples -------- >>> G = nx.MultiGraph() # or MultiDiGraph >>> G.add_edge(1,1) >>> G.add_edge(1,2) >>> G.selfloop_edges() [(1, 1)] >>> G.selfloop_edges(data=True) [(1, 1, {})] >>> G.selfloop_edges(keys=True) [(1, 1, 0)] >>> G.selfloop_edges(keys=True, data=True) [(1, 1, 0, {})] """ if data: if keys: return [ (n,n,k,d) for n,nbrs in self.adj.items() if n in nbrs for k,d in nbrs[n].items()] else: return [ (n,n,d) for n,nbrs in self.adj.items() if n in nbrs for d in nbrs[n].values()] else: if keys: return [ (n,n,k) for n,nbrs in self.adj.items() if n in nbrs for k in nbrs[n].keys()] else: return [ (n,n) for n,nbrs in self.adj.items() if n in nbrs for d in nbrs[n].values()] def number_of_edges(self, u=None, v=None): """Return the number of edges between two nodes. 
Parameters ---------- u,v : nodes, optional (default=all edges) If u and v are specified, return the number of edges between u and v. Otherwise return the total number of all edges. Returns ------- nedges : int The number of edges in the graph. If nodes u and v are specified return the number of edges between those nodes. See Also -------- size Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.number_of_edges() 3 >>> G.number_of_edges(0,1) 1 >>> e = (0,1) >>> G.number_of_edges(*e) 1 """ if u is None: return self.size() try: edgedata=self.adj[u][v] except KeyError: return 0 # no such edge return len(edgedata) def subgraph(self, nbunch): """Return the subgraph induced on nodes in nbunch. The induced subgraph of the graph contains the nodes in nbunch and the edges between those nodes. Parameters ---------- nbunch : list, iterable A container of nodes which will be iterated through once. Returns ------- G : Graph A subgraph of the graph with the same edge attributes. Notes ----- The graph, edge or node attributes just point to the original graph. So changes to the node or edge structure will not be reflected in the original graph while changes to the attributes will. To create a subgraph with its own copy of the edge/node attributes use: nx.Graph(G.subgraph(nbunch)) If edge attributes are containers, a deep copy can be obtained using: G.subgraph(nbunch).copy() For an inplace reduction of a graph to a subgraph you can remove nodes: G.remove_nodes_from([ n in G if n not in set(nbunch)]) Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> H = G.subgraph([0,1,2]) >>> H.edges() [(0, 1), (1, 2)] """ bunch =self.nbunch_iter(nbunch) # create new graph and copy subgraph into it H = self.__class__() # copy node and attribute dictionaries for n in bunch: H.node[n]=self.node[n] # namespace shortcuts for speed H_adj=H.adj self_adj=self.adj # add nodes and edges (undirected method) for n in H: Hnbrs={} H_adj[n]=Hnbrs for nbr,edgedict in self_adj[n].items(): if nbr in H_adj: # add both representations of edge: n-nbr and nbr-n # they share the same edgedict ed=edgedict.copy() Hnbrs[nbr]=ed H_adj[nbr][n]=ed H.graph=self.graph return H networkx-1.8.1/networkx/classes/graph.py0000664000175000017500000015104212177456333020271 0ustar aricaric00000000000000"""Base class for undirected graphs. The Graph class allows any hashable object as a node and can associate key/value attribute pairs with each undirected edge. Self-loops are allowed but multiple edges are not (see MultiGraph). For directed graphs see DiGraph and MultiDiGraph. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from copy import deepcopy import networkx as nx from networkx.exception import NetworkXError import networkx.convert as convert __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) class Graph(object): """ Base class for undirected graphs. A Graph stores nodes and edges with optional data, or attributes. Graphs hold undirected edges. Self loops are allowed but multiple (parallel) edges are not. Nodes can be arbitrary (hashable) Python objects with optional key/value attributes. Edges are represented as links between nodes with optional key/value attributes. Parameters ---------- data : input graph Data to initialize graph. If data=None (default) an empty graph is created. 
The data can be an edge list, or any NetworkX graph object. If the corresponding optional Python packages are installed the data can also be a NumPy matrix or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to graph as key=value pairs. See Also -------- DiGraph MultiGraph MultiDiGraph Examples -------- Create an empty graph structure (a "null graph") with no nodes and no edges. >>> G = nx.Graph() G can be grown in several ways. **Nodes:** Add one node at a time: >>> G.add_node(1) Add the nodes from any container (a list, dict, set or even the lines from a file or the nodes from another graph). >>> G.add_nodes_from([2,3]) >>> G.add_nodes_from(range(100,110)) >>> H=nx.Graph() >>> H.add_path([0,1,2,3,4,5,6,7,8,9]) >>> G.add_nodes_from(H) In addition to strings and integers any hashable Python object (except None) can represent a node, e.g. a customized node object, or even another Graph. >>> G.add_node(H) **Edges:** G can also be grown by adding edges. Add one edge, >>> G.add_edge(1, 2) a list of edges, >>> G.add_edges_from([(1,2),(1,3)]) or a collection of edges, >>> G.add_edges_from(H.edges()) If some edges connect nodes not yet in the graph, the nodes are added automatically. There are no errors when adding nodes or edges that already exist. **Attributes:** Each graph, node, and edge can hold key/value attribute pairs in an associated attribute dictionary (the keys must be hashable). By default these are empty, but can be added or changed using add_edge, add_node or direct manipulation of the attribute dictionaries named graph, node and edge respectively. >>> G = nx.Graph(day="Friday") >>> G.graph {'day': 'Friday'} Add node attributes using add_node(), add_nodes_from() or G.node >>> G.add_node(1, time='5pm') >>> G.add_nodes_from([3], time='2pm') >>> G.node[1] {'time': '5pm'} >>> G.node[1]['room'] = 714 >>> del G.node[1]['room'] # remove attribute >>> G.nodes(data=True) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Warning: adding a node to G.node does not add it to the graph. Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edge. >>> G.add_edge(1, 2, weight=4.7 ) >>> G.add_edges_from([(3,4),(4,5)], color='red') >>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) >>> G[1][2]['weight'] = 4.7 >>> G.edge[1][2]['weight'] = 4 **Shortcuts:** Many common graph features allow python syntax to speed reporting. >>> 1 in G # check if node in graph True >>> [n for n in G if n<3] # iterate through nodes [1, 2] >>> len(G) # number of nodes in graph 5 The fastest way to traverse all edges of a graph is via adjacency_iter(), but the edges() method is often more convenient. >>> for n,nbrsdict in G.adjacency_iter(): ... for nbr,eattr in nbrsdict.items(): ... if 'weight' in eattr: ... (n,nbr,eattr['weight']) (1, 2, 4) (2, 1, 4) (2, 3, 8) (3, 2, 8) >>> [ (u,v,edata['weight']) for u,v,edata in G.edges(data=True) if 'weight' in edata ] [(1, 2, 4), (2, 3, 8)] **Reporting:** Simple graph information is obtained using methods. Iterator versions of many reporting methods exist for efficiency. Methods exist for reporting nodes(), edges(), neighbors() and degree() as well as the number of nodes and edges. For details on these and other miscellaneous methods, see below. """ def __init__(self, data=None, **attr): """Initialize a graph with edges, name, graph attributes. Parameters ---------- data : input graph Data to initialize graph. If data=None (default) an empty graph is created. 
The data can be an edge list, or any NetworkX graph object. If the corresponding optional Python packages are installed the data can also be a NumPy matrix or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph. name : string, optional (default='') An optional name for the graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to graph as key=value pairs. See Also -------- convert Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G = nx.Graph(name='my graph') >>> e = [(1,2),(2,3),(3,4)] # list of edges >>> G = nx.Graph(e) Arbitrary graph attribute pairs (key=value) may be assigned >>> G=nx.Graph(e, day="Friday") >>> G.graph {'day': 'Friday'} """ self.graph = {} # dictionary for graph attributes self.node = {} # empty node dict (created before convert) self.adj = {} # empty adjacency dict # attempt to load graph with data if data is not None: convert.to_networkx_graph(data,create_using=self) # load graph attributes (must be after convert) self.graph.update(attr) self.edge = self.adj @property def name(self): return self.graph.get('name','') @name.setter def name(self, s): self.graph['name']=s def __str__(self): """Return the graph name. Returns ------- name : string The name of the graph. Examples -------- >>> G = nx.Graph(name='foo') >>> str(G) 'foo' """ return self.name def __iter__(self): """Iterate over the nodes. Use the expression 'for n in G'. Returns ------- niter : iterator An iterator over all nodes in the graph. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) """ return iter(self.node) def __contains__(self,n): """Return True if n is a node, False otherwise. Use the expression 'n in G'. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> 1 in G True """ try: return n in self.node except TypeError: return False def __len__(self): """Return the number of nodes. Use the expression 'len(G)'. Returns ------- nnodes : int The number of nodes in the graph. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> len(G) 4 """ return len(self.node) def __getitem__(self, n): """Return a dict of neighbors of node n. Use the expression 'G[n]'. Parameters ---------- n : node A node in the graph. Returns ------- adj_dict : dictionary The adjacency dictionary for nodes connected to n. Notes ----- G[n] is similar to G.neighbors(n) but the internal data dictionary is returned instead of a list. Assigning G[n] will corrupt the internal graph data structure. Use G[n] for reading data only. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G[0] {1: {}} """ return self.adj[n] def add_node(self, n, attr_dict=None, **attr): """Add a single node n and update node attributes. Parameters ---------- n : node A node can be any hashable Python object except None. attr_dict : dictionary, optional (default= no attributes) Dictionary of node attributes. Key/value pairs will update existing data associated with the node. attr : keyword arguments, optional Set or change attributes using key=value. 
See Also -------- add_nodes_from Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_node(1) >>> G.add_node('Hello') >>> K3 = nx.Graph([(0,1),(1,2),(2,0)]) >>> G.add_node(K3) >>> G.number_of_nodes() 3 Use keywords set/change node attributes: >>> G.add_node(1,size=10) >>> G.add_node(3,weight=0.4,UTM=('13S',382871,3972649)) Notes ----- A hashable object is one that can be used as a key in a Python dictionary. This includes strings, numbers, tuples of strings and numbers, etc. On many platforms hashable items also include mutables such as NetworkX Graphs, though one should be careful that the hash doesn't change on mutables. """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") if n not in self.node: self.adj[n] = {} self.node[n] = attr_dict else: # update attr even if node already exists self.node[n].update(attr_dict) def add_nodes_from(self, nodes, **attr): """Add multiple nodes. Parameters ---------- nodes : iterable container A container of nodes (list, dict, set, etc.). OR A container of (node, attribute dict) tuples. Node attributes are updated using the attribute dict. attr : keyword arguments, optional (default= no attributes) Update attributes for all nodes in nodes. Node attributes specified in nodes as a tuple take precedence over attributes specified generally. See Also -------- add_node Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_nodes_from('Hello') >>> K3 = nx.Graph([(0,1),(1,2),(2,0)]) >>> G.add_nodes_from(K3) >>> sorted(G.nodes(),key=str) [0, 1, 2, 'H', 'e', 'l', 'o'] Use keywords to update specific node attributes for every node. >>> G.add_nodes_from([1,2], size=10) >>> G.add_nodes_from([3,4], weight=0.4) Use (node, attrdict) tuples to update attributes for specific nodes. >>> G.add_nodes_from([(1,dict(size=11)), (2,{'color':'blue'})]) >>> G.node[1]['size'] 11 >>> H = nx.Graph() >>> H.add_nodes_from(G.nodes(data=True)) >>> H.node[1]['size'] 11 """ for n in nodes: try: newnode=n not in self.node except TypeError: nn,ndict = n if nn not in self.node: self.adj[nn] = {} newdict = attr.copy() newdict.update(ndict) self.node[nn] = newdict else: olddict = self.node[nn] olddict.update(attr) olddict.update(ndict) continue if newnode: self.adj[n] = {} self.node[n] = attr.copy() else: self.node[n].update(attr) def remove_node(self,n): """Remove node n. Removes the node n and all adjacent edges. Attempting to remove a non-existent node will raise an exception. Parameters ---------- n : node A node in the graph Raises ------- NetworkXError If n is not in the graph. See Also -------- remove_nodes_from Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> G.edges() [(0, 1), (1, 2)] >>> G.remove_node(1) >>> G.edges() [] """ adj = self.adj try: nbrs = list(adj[n].keys()) # keys handles self-loops (allow mutation later) del self.node[n] except KeyError: # NetworkXError if n not in self raise NetworkXError("The node %s is not in the graph."%(n,)) for u in nbrs: del adj[u][n] # remove all edges n-u in graph del adj[n] # now remove node def remove_nodes_from(self, nodes): """Remove multiple nodes. Parameters ---------- nodes : iterable container A container of nodes (list, dict, set, etc.). If a node in the container is not in the graph it is silently ignored. 
See Also -------- remove_node Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> e = G.nodes() >>> e [0, 1, 2] >>> G.remove_nodes_from(e) >>> G.nodes() [] """ adj = self.adj for n in nodes: try: del self.node[n] for u in list(adj[n].keys()): # keys() handles self-loops del adj[u][n] #(allows mutation of dict in loop) del adj[n] except KeyError: pass def nodes_iter(self, data=False): """Return an iterator over the nodes. Parameters ---------- data : boolean, optional (default=False) If False the iterator returns nodes. If True return a two-tuple of node and node data dictionary Returns ------- niter : iterator An iterator over nodes. If data=True the iterator gives two-tuples containing (node, node data, dictionary) Notes ----- If the node data is not required it is simpler and equivalent to use the expression 'for n in G'. >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> [d for n,d in G.nodes_iter(data=True)] [{}, {}, {}] """ if data: return iter(self.node.items()) return iter(self.node) def nodes(self, data=False): """Return a list of the nodes in the graph. Parameters ---------- data : boolean, optional (default=False) If False return a list of nodes. If True return a two-tuple of node and node data dictionary Returns ------- nlist : list A list of nodes. If data=True a list of two-tuples containing (node, node data dictionary). Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> G.nodes() [0, 1, 2] >>> G.add_node(1, time='5pm') >>> G.nodes(data=True) [(0, {}), (1, {'time': '5pm'}), (2, {})] """ return list(self.nodes_iter(data=data)) def number_of_nodes(self): """Return the number of nodes in the graph. Returns ------- nnodes : int The number of nodes in the graph. See Also -------- order, __len__ which are identical Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> len(G) 3 """ return len(self.node) def order(self): """Return the number of nodes in the graph. Returns ------- nnodes : int The number of nodes in the graph. See Also -------- number_of_nodes, __len__ which are identical """ return len(self.node) def has_node(self, n): """Return True if the graph contains the node n. Parameters ---------- n : node Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> G.has_node(0) True It is more readable and simpler to use >>> 0 in G True """ try: return n in self.node except TypeError: return False def add_edge(self, u, v, attr_dict=None, **attr): """Add an edge between u and v. The nodes u and v will be automatically added if they are not already in the graph. Edge attributes can be specified with keywords or by providing a dictionary with key/value pairs. See examples below. Parameters ---------- u,v : nodes Nodes can be, for example, strings or numbers. Nodes must be hashable (and not None) Python objects. attr_dict : dictionary, optional (default= no attributes) Dictionary of edge attributes. Key/value pairs will update existing data associated with the edge. attr : keyword arguments, optional Edge data (or labels or objects) can be assigned using keyword arguments. See Also -------- add_edges_from : add a collection of edges Notes ----- Adding an edge that already exists updates the edge data. 
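For example (a minimal sketch of this behaviour):

>>> G = nx.Graph()
>>> G.add_edge(1, 2, weight=3)
>>> G.add_edge(1, 2, weight=7)   # same edge again: the data is updated
>>> G[1][2]
{'weight': 7}
>>> G.number_of_edges()
1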
Many NetworkX algorithms designed for weighted graphs use as the edge weight a numerical value assigned to a keyword which by default is 'weight'. Examples -------- The following all add the edge e=(1,2) to graph G: >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> e = (1,2) >>> G.add_edge(1, 2) # explicit two-node form >>> G.add_edge(*e) # single edge as tuple of two nodes >>> G.add_edges_from( [(1,2)] ) # add edges from iterable container Associate data to edges using keywords: >>> G.add_edge(1, 2, weight=3) >>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7) """ # set up attribute dictionary if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") # add nodes if u not in self.node: self.adj[u] = {} self.node[u] = {} if v not in self.node: self.adj[v] = {} self.node[v] = {} # add the edge datadict=self.adj[u].get(v,{}) datadict.update(attr_dict) self.adj[u][v] = datadict self.adj[v][u] = datadict def add_edges_from(self, ebunch, attr_dict=None, **attr): """Add all the edges in ebunch. Parameters ---------- ebunch : container of edges Each edge given in the container will be added to the graph. The edges must be given as as 2-tuples (u,v) or 3-tuples (u,v,d) where d is a dictionary containing edge data. attr_dict : dictionary, optional (default= no attributes) Dictionary of edge attributes. Key/value pairs will update existing data associated with each edge. attr : keyword arguments, optional Edge data (or labels or objects) can be assigned using keyword arguments. See Also -------- add_edge : add a single edge add_weighted_edges_from : convenient way to add weighted edges Notes ----- Adding the same edge twice has no effect but any edge data will be updated when each duplicate edge is added. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edges_from([(0,1),(1,2)]) # using a list of edge tuples >>> e = zip(range(0,3),range(1,4)) >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 Associate data to edges >>> G.add_edges_from([(1,2),(2,3)], weight=3) >>> G.add_edges_from([(3,4),(1,4)], label='WN2898') """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") # process ebunch for e in ebunch: ne=len(e) if ne==3: u,v,dd = e elif ne==2: u,v = e dd = {} else: raise NetworkXError(\ "Edge tuple %s must be a 2-tuple or 3-tuple."%(e,)) if u not in self.node: self.adj[u] = {} self.node[u] = {} if v not in self.node: self.adj[v] = {} self.node[v] = {} datadict=self.adj[u].get(v,{}) datadict.update(attr_dict) datadict.update(dd) self.adj[u][v] = datadict self.adj[v][u] = datadict def add_weighted_edges_from(self, ebunch, weight='weight', **attr): """Add all the edges in ebunch as weighted edges with specified weights. Parameters ---------- ebunch : container of edges Each edge given in the list or container will be added to the graph. The edges must be given as 3-tuples (u,v,w) where w is a number. weight : string, optional (default= 'weight') The attribute name for the edge weights to be added. attr : keyword arguments, optional (default= no attributes) Edge attributes to add/update for all edges. See Also -------- add_edge : add a single edge add_edges_from : add multiple edges Notes ----- Adding the same edge twice for Graph/DiGraph simply updates the edge data. 
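For a plain Graph, for example (illustrative sketch):

>>> G = nx.Graph()
>>> G.add_weighted_edges_from([(0, 1, 3.0), (0, 1, 7.0)])
>>> G[0][1]['weight']
7.0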
For MultiGraph/MultiDiGraph, duplicate edges are stored. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_weighted_edges_from([(0,1,3.0),(1,2,7.5)]) """ self.add_edges_from(((u,v,{weight:d}) for u,v,d in ebunch),**attr) def remove_edge(self, u, v): """Remove the edge between u and v. Parameters ---------- u,v: nodes Remove the edge between nodes u and v. Raises ------ NetworkXError If there is not an edge between u and v. See Also -------- remove_edges_from : remove a collection of edges Examples -------- >>> G = nx.Graph() # or DiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.remove_edge(0,1) >>> e = (1,2) >>> G.remove_edge(*e) # unpacks e from an edge tuple >>> e = (2,3,{'weight':7}) # an edge with attribute data >>> G.remove_edge(*e[:2]) # select first part of edge tuple """ try: del self.adj[u][v] if u != v: # self-loop needs only one entry removed del self.adj[v][u] except KeyError: raise NetworkXError("The edge %s-%s is not in the graph"%(u,v)) def remove_edges_from(self, ebunch): """Remove all edges specified in ebunch. Parameters ---------- ebunch: list or container of edge tuples Each edge given in the list or container will be removed from the graph. The edges can be: - 2-tuples (u,v) edge between u and v. - 3-tuples (u,v,k) where k is ignored. See Also -------- remove_edge : remove a single edge Notes ----- Will fail silently if an edge in ebunch is not in the graph. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> ebunch=[(1,2),(2,3)] >>> G.remove_edges_from(ebunch) """ adj=self.adj for e in ebunch: u,v = e[:2] # ignore edge data if present if u in adj and v in adj[u]: del adj[u][v] if u != v: # self loop needs only one entry removed del adj[v][u] def has_edge(self, u, v): """Return True if the edge (u,v) is in the graph. Parameters ---------- u,v : nodes Nodes can be, for example, strings or numbers. Nodes must be hashable (and not None) Python objects. Returns ------- edge_ind : bool True if edge is in the graph, False otherwise. Examples -------- Can be called either using two nodes u,v or edge tuple (u,v) >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.has_edge(0,1) # using two nodes True >>> e = (0,1) >>> G.has_edge(*e) # e is a 2-tuple (u,v) True >>> e = (0,1,{'weight':7}) >>> G.has_edge(*e[:2]) # e is a 3-tuple (u,v,data_dictionary) True The following syntax are all equivalent: >>> G.has_edge(0,1) True >>> 1 in G[0] # though this gives KeyError if 0 not in G True """ try: return v in self.adj[u] except KeyError: return False def neighbors(self, n): """Return a list of the nodes connected to the node n. Parameters ---------- n : node A node in the graph Returns ------- nlist : list A list of nodes that are adjacent to n. Raises ------ NetworkXError If the node n is not in the graph. Notes ----- It is usually more convenient (and faster) to access the adjacency dictionary as G[n]: >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edge('a','b',weight=7) >>> G['a'] {'b': {'weight': 7}} Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.neighbors(0) [1] """ try: return list(self.adj[n]) except KeyError: raise NetworkXError("The node %s is not in the graph."%(n,)) def neighbors_iter(self, n): """Return an iterator over all neighbors of node n. 
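A self-loop on n yields n itself once (illustrative sketch):

>>> G = nx.Graph()
>>> G.add_edge(0, 0)
>>> G.add_edge(0, 1)
>>> sorted(G.neighbors_iter(0))
[0, 1]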
Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> [n for n in G.neighbors_iter(0)] [1] Notes ----- It is faster to use the idiom "in G[0]", e.g. >>> G = nx.path_graph(4) >>> [n for n in G[0]] [1] """ try: return iter(self.adj[n]) except KeyError: raise NetworkXError("The node %s is not in the graph."%(n,)) def edges(self, nbunch=None, data=False): """Return a list of edges. Edges are returned as tuples with optional data in the order (node, neighbor, data). Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) Return two tuples (u,v) (False) or three-tuples (u,v,data) (True). Returns -------- edge_list: list of edge tuples Edges that are adjacent to any node in nbunch, or a list of all edges if nbunch is not specified. See Also -------- edges_iter : return an iterator over the edges Notes ----- Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs this returns the out-edges. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.edges() [(0, 1), (1, 2), (2, 3)] >>> G.edges(data=True) # default edge data is {} (empty dictionary) [(0, 1, {}), (1, 2, {}), (2, 3, {})] >>> G.edges([0,3]) [(0, 1), (3, 2)] >>> G.edges(0) [(0, 1)] """ return list(self.edges_iter(nbunch, data)) def edges_iter(self, nbunch=None, data=False): """Return an iterator over the edges. Edges are returned as tuples with optional data in the order (node, neighbor, data). Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict in 3-tuple (u,v,data). Returns ------- edge_iter : iterator An iterator of (u,v) or (u,v,d) tuples of edges. See Also -------- edges : return a list of edges Notes ----- Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs this returns the out-edges. Examples -------- >>> G = nx.Graph() # or MultiGraph, etc >>> G.add_path([0,1,2,3]) >>> [e for e in G.edges_iter()] [(0, 1), (1, 2), (2, 3)] >>> list(G.edges_iter(data=True)) # default data is {} (empty dict) [(0, 1, {}), (1, 2, {}), (2, 3, {})] >>> list(G.edges_iter([0,3])) [(0, 1), (3, 2)] >>> list(G.edges_iter(0)) [(0, 1)] """ seen={} # helper dict to keep track of multiply stored edges if nbunch is None: nodes_nbrs = self.adj.items() else: nodes_nbrs=((n,self.adj[n]) for n in self.nbunch_iter(nbunch)) if data: for n,nbrs in nodes_nbrs: for nbr,data in nbrs.items(): if nbr not in seen: yield (n,nbr,data) seen[n]=1 else: for n,nbrs in nodes_nbrs: for nbr in nbrs: if nbr not in seen: yield (n,nbr) seen[n] = 1 del seen def get_edge_data(self, u, v, default=None): """Return the attribute dictionary associated with edge (u,v). Parameters ---------- u,v : nodes default: any Python object (default=None) Value to return if the edge (u,v) is not found. Returns ------- edge_dict : dictionary The edge attribute dictionary. Notes ----- It is faster to use G[u][v]. >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G[0][1] {} Warning: Assigning G[u][v] corrupts the graph data structure. 
But it is safe to assign attributes to that dictionary, >>> G[0][1]['weight'] = 7 >>> G[0][1]['weight'] 7 >>> G[1][0]['weight'] 7 Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.get_edge_data(0,1) # default edge data is {} {} >>> e = (0,1) >>> G.get_edge_data(*e) # tuple form {} >>> G.get_edge_data('a','b',default=0) # edge not in graph, return 0 0 """ try: return self.adj[u][v] except KeyError: return default def adjacency_list(self): """Return an adjacency list representation of the graph. The output adjacency list is in the order of G.nodes(). For directed graphs, only outgoing adjacencies are included. Returns ------- adj_list : lists of lists The adjacency structure of the graph as a list of lists. See Also -------- adjacency_iter Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.adjacency_list() # in order given by G.nodes() [[1], [0, 2], [1, 3], [2]] """ return list(map(list,iter(self.adj.values()))) def adjacency_iter(self): """Return an iterator of (node, adjacency dict) tuples for all nodes. This is the fastest way to look at every edge. For directed graphs, only outgoing adjacencies are included. Returns ------- adj_iter : iterator An iterator of (node, adjacency dictionary) for all nodes in the graph. See Also -------- adjacency_list Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> [(n,nbrdict) for n,nbrdict in G.adjacency_iter()] [(0, {1: {}}), (1, {0: {}, 2: {}}), (2, {1: {}, 3: {}}), (3, {2: {}})] """ return iter(self.adj.items()) def degree(self, nbunch=None, weight=None): """Return the degree of a node or nodes. The node degree is the number of edges adjacent to that node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd : dictionary, or number A dictionary with nodes as keys and degree as values or a number if a single node is specified. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.degree(0) 1 >>> G.degree([0,1]) {0: 1, 1: 2} >>> list(G.degree([0,1]).values()) [1, 2] """ if nbunch in self: # return a single node return next(self.degree_iter(nbunch,weight))[1] else: # return a dict return dict(self.degree_iter(nbunch,weight)) def degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, degree). The node degree is the number of edges adjacent to the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, degree). 
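With a weight attribute, for example (illustrative sketch):

>>> G = nx.Graph()
>>> G.add_edge(1, 2, weight=2)
>>> G.add_edge(2, 3, weight=3)
>>> sorted(G.degree_iter(weight='weight'))
[(1, 2), (2, 5), (3, 3)]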
See Also -------- degree Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> list(G.degree_iter(0)) # node 0 with degree 1 [(0, 1)] >>> list(G.degree_iter([0,1])) [(0, 1), (1, 2)] """ if nbunch is None: nodes_nbrs = self.adj.items() else: nodes_nbrs=((n,self.adj[n]) for n in self.nbunch_iter(nbunch)) if weight is None: for n,nbrs in nodes_nbrs: yield (n,len(nbrs)+(n in nbrs)) # return tuple (n,degree) else: # edge weighted graph - degree is sum of nbr edge weights for n,nbrs in nodes_nbrs: yield (n, sum((nbrs[nbr].get(weight,1) for nbr in nbrs)) + (n in nbrs and nbrs[n].get(weight,1))) def clear(self): """Remove all nodes and edges from the graph. This also removes the name, and all graph, node, and edge attributes. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.clear() >>> G.nodes() [] >>> G.edges() [] """ self.name = '' self.adj.clear() self.node.clear() self.graph.clear() def copy(self): """Return a copy of the graph. Returns ------- G : Graph A copy of the graph. See Also -------- to_directed: return a directed copy of the graph. Notes ----- This makes a complete copy of the graph including all of the node or edge attributes. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> H = G.copy() """ return deepcopy(self) def is_multigraph(self): """Return True if graph is a multigraph, False otherwise.""" return False def is_directed(self): """Return True if graph is directed, False otherwise.""" return False def to_directed(self): """Return a directed representation of the graph. Returns ------- G : DiGraph A directed graph with the same name, same nodes, and with each edge (u,v,data) replaced by two directed edges (u,v,data) and (v,u,data). Notes ----- This returns a "deepcopy" of the edge, node, and graph attributes which attempts to completely copy all of the data and references. This is in contrast to the similar D=DiGraph(G) which returns a shallow copy of the data. See the Python copy module for more information on shallow and deep copies, http://docs.python.org/library/copy.html. Examples -------- >>> G = nx.Graph() # or MultiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1), (1, 0)] If already directed, return a (deep) copy >>> G = nx.DiGraph() # or MultiDiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1)] """ from networkx import DiGraph G=DiGraph() G.name=self.name G.add_nodes_from(self) G.add_edges_from( ((u,v,deepcopy(data)) for u,nbrs in self.adjacency_iter() for v,data in nbrs.items()) ) G.graph=deepcopy(self.graph) G.node=deepcopy(self.node) return G def to_undirected(self): """Return an undirected copy of the graph. Returns ------- G : Graph/MultiGraph A deepcopy of the graph. See Also -------- copy, add_edge, add_edges_from Notes ----- This returns a "deepcopy" of the edge, node, and graph attributes which attempts to completely copy all of the data and references. This is in contrast to the similar G=DiGraph(D) which returns a shallow copy of the data. See the Python copy module for more information on shallow and deep copies, http://docs.python.org/library/copy.html. 
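Because the copy is deep, edge data in the copy is independent of the
original (illustrative sketch):

>>> G = nx.Graph()
>>> G.add_edge(0, 1, weight=1)
>>> H = G.to_undirected()
>>> H[0][1]['weight'] = 7
>>> G[0][1]['weight']
1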
Examples -------- >>> G = nx.Graph() # or MultiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1), (1, 0)] >>> G2 = H.to_undirected() >>> G2.edges() [(0, 1)] """ return deepcopy(self) def subgraph(self, nbunch): """Return the subgraph induced on nodes in nbunch. The induced subgraph of the graph contains the nodes in nbunch and the edges between those nodes. Parameters ---------- nbunch : list, iterable A container of nodes which will be iterated through once. Returns ------- G : Graph A subgraph of the graph with the same edge attributes. Notes ----- The graph, edge or node attributes just point to the original graph. So changes to the node or edge structure will not be reflected in the original graph while changes to the attributes will. To create a subgraph with its own copy of the edge/node attributes use: nx.Graph(G.subgraph(nbunch)) If edge attributes are containers, a deep copy can be obtained using: G.subgraph(nbunch).copy() For an inplace reduction of a graph to a subgraph you can remove nodes: G.remove_nodes_from([ n in G if n not in set(nbunch)]) Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> H = G.subgraph([0,1,2]) >>> H.edges() [(0, 1), (1, 2)] """ bunch =self.nbunch_iter(nbunch) # create new graph and copy subgraph into it H = self.__class__() # copy node and attribute dictionaries for n in bunch: H.node[n]=self.node[n] # namespace shortcuts for speed H_adj=H.adj self_adj=self.adj # add nodes and edges (undirected method) for n in H.node: Hnbrs={} H_adj[n]=Hnbrs for nbr,d in self_adj[n].items(): if nbr in H_adj: # add both representations of edge: n-nbr and nbr-n Hnbrs[nbr]=d H_adj[nbr][n]=d H.graph=self.graph return H def nodes_with_selfloops(self): """Return a list of nodes with self loops. A node with a self loop has an edge with both ends adjacent to that node. Returns ------- nodelist : list A list of nodes with self loops. See Also -------- selfloop_edges, number_of_selfloops Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edge(1,1) >>> G.add_edge(1,2) >>> G.nodes_with_selfloops() [1] """ return [ n for n,nbrs in self.adj.items() if n in nbrs ] def selfloop_edges(self, data=False): """Return a list of selfloop edges. A selfloop edge has the same node at both ends. Parameters ----------- data : bool, optional (default=False) Return selfloop edges as two tuples (u,v) (data=False) or three-tuples (u,v,data) (data=True) Returns ------- edgelist : list of edge tuples A list of all selfloop edges. See Also -------- nodes_with_selfloops, number_of_selfloops Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edge(1,1) >>> G.add_edge(1,2) >>> G.selfloop_edges() [(1, 1)] >>> G.selfloop_edges(data=True) [(1, 1, {})] """ if data: return [ (n,n,nbrs[n]) for n,nbrs in self.adj.items() if n in nbrs ] else: return [ (n,n) for n,nbrs in self.adj.items() if n in nbrs ] def number_of_selfloops(self): """Return the number of selfloop edges. A selfloop edge has the same node at both ends. Returns ------- nloops : int The number of selfloops. See Also -------- nodes_with_selfloops, selfloop_edges Examples -------- >>> G=nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edge(1,1) >>> G.add_edge(1,2) >>> G.number_of_selfloops() 1 """ return len(self.selfloop_edges()) def size(self, weight=None): """Return the number of edges. 
Parameters ---------- weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. Returns ------- nedges : int The number of edges of sum of edge weights in the graph. See Also -------- number_of_edges Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.size() 3 >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edge('a','b',weight=2) >>> G.add_edge('b','c',weight=4) >>> G.size() 2 >>> G.size(weight='weight') 6.0 """ s=sum(self.degree(weight=weight).values())/2 if weight is None: return int(s) else: return float(s) def number_of_edges(self, u=None, v=None): """Return the number of edges between two nodes. Parameters ---------- u,v : nodes, optional (default=all edges) If u and v are specified, return the number of edges between u and v. Otherwise return the total number of all edges. Returns ------- nedges : int The number of edges in the graph. If nodes u and v are specified return the number of edges between those nodes. See Also -------- size Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.number_of_edges() 3 >>> G.number_of_edges(0,1) 1 >>> e = (0,1) >>> G.number_of_edges(*e) 1 """ if u is None: return int(self.size()) if v in self.adj[u]: return 1 else: return 0 def add_star(self, nodes, **attr): """Add a star. The first node in nodes is the middle of the star. It is connected to all other nodes. Parameters ---------- nodes : iterable container A container of nodes. attr : keyword arguments, optional (default= no attributes) Attributes to add to every edge in star. See Also -------- add_path, add_cycle Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_star([0,1,2,3]) >>> G.add_star([10,11,12],weight=2) """ nlist = list(nodes) v=nlist[0] edges=((v,n) for n in nlist[1:]) self.add_edges_from(edges, **attr) def add_path(self, nodes, **attr): """Add a path. Parameters ---------- nodes : iterable container A container of nodes. A path will be constructed from the nodes (in order) and added to the graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to every edge in path. See Also -------- add_star, add_cycle Examples -------- >>> G=nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.add_path([10,11,12],weight=7) """ nlist = list(nodes) edges=zip(nlist[:-1],nlist[1:]) self.add_edges_from(edges, **attr) def add_cycle(self, nodes, **attr): """Add a cycle. Parameters ---------- nodes: iterable container A container of nodes. A cycle will be constructed from the nodes (in order) and added to the graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to every edge in cycle. See Also -------- add_path, add_star Examples -------- >>> G=nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_cycle([0,1,2,3]) >>> G.add_cycle([10,11,12],weight=7) """ nlist = list(nodes) edges=zip(nlist,nlist[1:]+[nlist[0]]) self.add_edges_from(edges, **attr) def nbunch_iter(self, nbunch=None): """Return an iterator of nodes contained in nbunch that are also in the graph. The nodes in nbunch are checked for membership in the graph and if not are silently ignored. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. 
Returns ------- niter : iterator An iterator over nodes in nbunch that are also in the graph. If nbunch is None, iterate over all nodes in the graph. Raises ------ NetworkXError If nbunch is not a node or or sequence of nodes. If a node in nbunch is not hashable. See Also -------- Graph.__iter__ Notes ----- When nbunch is an iterator, the returned iterator yields values directly from nbunch, becoming exhausted when nbunch is exhausted. To test whether nbunch is a single node, one can use "if nbunch in self:", even after processing with this routine. If nbunch is not a node or a (possibly empty) sequence/iterator or None, a NetworkXError is raised. Also, if any object in nbunch is not hashable, a NetworkXError is raised. """ if nbunch is None: # include all nodes via iterator bunch=iter(self.adj.keys()) elif nbunch in self: # if nbunch is a single node bunch=iter([nbunch]) else: # if nbunch is a sequence of nodes def bunch_iter(nlist,adj): try: for n in nlist: if n in adj: yield n except TypeError as e: message=e.args[0] import sys sys.stdout.write(message) # capture error for non-sequence/iterator nbunch. if 'iter' in message: raise NetworkXError(\ "nbunch is not a node or a sequence of nodes.") # capture error for unhashable node. elif 'hashable' in message: raise NetworkXError(\ "Node %s in the sequence nbunch is not a valid node."%n) else: raise bunch=bunch_iter(nbunch,self.adj) return bunch networkx-1.8.1/networkx/classes/__init__.py0000664000175000017500000000035012177456333020722 0ustar aricaric00000000000000from networkx.classes.graph import Graph from networkx.classes.digraph import DiGraph from networkx.classes.multigraph import MultiGraph from networkx.classes.multidigraph import MultiDiGraph from networkx.classes.function import * networkx-1.8.1/networkx/classes/tests/0000775000175000017500000000000012177457361017757 5ustar aricaric00000000000000networkx-1.8.1/networkx/classes/tests/test_graph.py0000664000175000017500000005113312177456333022472 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx class BaseGraphTester(object): """ Tests for data-structure independent graph class features.""" def test_contains(self): G=self.K3 assert(1 in G ) assert(4 not in G ) assert('b' not in G ) assert([] not in G ) # no exception for nonhashable assert({1:1} not in G) # no exception for nonhashable def test_order(self): G=self.K3 assert_equal(len(G),3) assert_equal(G.order(),3) assert_equal(G.number_of_nodes(),3) def test_nodes_iter(self): G=self.K3 assert_equal(sorted(G.nodes_iter()),self.k3nodes) assert_equal(sorted(G.nodes_iter(data=True)),[(0,{}),(1,{}),(2,{})]) def test_nodes(self): G=self.K3 assert_equal(sorted(G.nodes()),self.k3nodes) assert_equal(sorted(G.nodes(data=True)),[(0,{}),(1,{}),(2,{})]) def test_has_node(self): G=self.K3 assert(G.has_node(1)) assert(not G.has_node(4)) assert(not G.has_node([])) # no exception for nonhashable assert(not G.has_node({1:1})) # no exception for nonhashable def test_has_edge(self): G=self.K3 assert_equal(G.has_edge(0,1),True) assert_equal(G.has_edge(0,-1),False) def test_neighbors(self): G=self.K3 assert_equal(sorted(G.neighbors(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.neighbors,-1) def test_neighbors_iter(self): G=self.K3 assert_equal(sorted(G.neighbors_iter(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.neighbors_iter,-1) def test_edges(self): G=self.K3 assert_equal(sorted(G.edges()),[(0,1),(0,2),(1,2)]) assert_equal(sorted(G.edges(0)),[(0,1),(0,2)]) 
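# a single node that is not in the graph raises an error rather than
# being silently ignored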
assert_raises((KeyError,networkx.NetworkXError), G.edges,-1) def test_edges_iter(self): G=self.K3 assert_equal(sorted(G.edges_iter()),[(0,1),(0,2),(1,2)]) assert_equal(sorted(G.edges_iter(0)),[(0,1),(0,2)]) f=lambda x:list(G.edges_iter(x)) assert_raises((KeyError,networkx.NetworkXError), f, -1) def test_adjacency_list(self): G=self.K3 assert_equal(G.adjacency_list(),[[1,2],[0,2],[0,1]]) def test_degree(self): G=self.K3 assert_equal(list(G.degree().values()),[2,2,2]) assert_equal(G.degree(),{0:2,1:2,2:2}) assert_equal(G.degree(0),2) assert_equal(G.degree([0]),{0:2}) assert_raises((KeyError,networkx.NetworkXError), G.degree,-1) def test_weighted_degree(self): G=self.Graph() G.add_edge(1,2,weight=2) G.add_edge(2,3,weight=3) assert_equal(list(G.degree(weight='weight').values()),[2,5,3]) assert_equal(G.degree(weight='weight'),{1:2,2:5,3:3}) assert_equal(G.degree(1,weight='weight'),2) assert_equal(G.degree([1],weight='weight'),{1:2}) def test_degree_iter(self): G=self.K3 assert_equal(list(G.degree_iter()),[(0,2),(1,2),(2,2)]) assert_equal(dict(G.degree_iter()),{0:2,1:2,2:2}) assert_equal(list(G.degree_iter(0)),[(0,2)]) def test_size(self): G=self.K3 assert_equal(G.size(),3) assert_equal(G.number_of_edges(),3) def test_add_star(self): G=self.K3.copy() nlist=[12,13,14,15] G.add_star(nlist) assert_equal(sorted(G.edges(nlist)),[(12,13),(12,14),(12,15)]) G=self.K3.copy() G.add_star(nlist,weight=2.0) assert_equal(sorted(G.edges(nlist,data=True)),\ [(12,13,{'weight':2.}), (12,14,{'weight':2.}), (12,15,{'weight':2.})]) def test_add_path(self): G=self.K3.copy() nlist=[12,13,14,15] G.add_path(nlist) assert_equal(sorted(G.edges(nlist)),[(12,13),(13,14),(14,15)]) G=self.K3.copy() G.add_path(nlist,weight=2.0) assert_equal(sorted(G.edges(nlist,data=True)),\ [(12,13,{'weight':2.}), (13,14,{'weight':2.}), (14,15,{'weight':2.})]) def test_add_cycle(self): G=self.K3.copy() nlist=[12,13,14,15] oklists=[ [(12,13),(12,15),(13,14),(14,15)], \ [(12,13),(13,14),(14,15),(15,12)] ] G.add_cycle(nlist) assert_true(sorted(G.edges(nlist)) in oklists) G=self.K3.copy() oklists=[ [(12,13,{'weight':1.}),\ (12,15,{'weight':1.}),\ (13,14,{'weight':1.}),\ (14,15,{'weight':1.})], \ \ [(12,13,{'weight':1.}),\ (13,14,{'weight':1.}),\ (14,15,{'weight':1.}),\ (15,12,{'weight':1.})] \ ] G.add_cycle(nlist,weight=1.0) assert_true(sorted(G.edges(nlist,data=True)) in oklists) def test_nbunch_iter(self): G=self.K3 assert_equal(list(G.nbunch_iter()),self.k3nodes) # all nodes assert_equal(list(G.nbunch_iter(0)),[0]) # single node assert_equal(list(G.nbunch_iter([0,1])),[0,1]) # sequence # sequence with none in graph assert_equal(list(G.nbunch_iter([-1])),[]) # string sequence with none in graph assert_equal(list(G.nbunch_iter("foo")),[]) # node not in graph doesn't get caught upon creation of iterator bunch=G.nbunch_iter(-1) # but gets caught when iterator used assert_raises(networkx.NetworkXError,list,bunch) # unhashable doesn't get caught upon creation of iterator bunch=G.nbunch_iter([0,1,2,{}]) # but gets caught when iterator hits the unhashable assert_raises(networkx.NetworkXError,list,bunch) def test_selfloop_degree(self): G=self.Graph() G.add_edge(1,1) assert_equal(list(G.degree().values()),[2]) assert_equal(G.degree(),{1:2}) assert_equal(G.degree(1),2) assert_equal(G.degree([1]),{1:2}) assert_equal(G.degree([1],weight='weight'),{1:2}) def test_selfloops(self): G=self.K3.copy() G.add_edge(0,0) assert_equal(G.nodes_with_selfloops(),[0]) assert_equal(G.selfloop_edges(),[(0,0)]) assert_equal(G.number_of_selfloops(),1) G.remove_edge(0,0) 
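# removing self-loops through remove_edges_from, remove_node and
# remove_nodes_from should also work without raising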
G.add_edge(0,0) G.remove_edges_from([(0,0)]) G.add_edge(1,1) G.remove_node(1) G.add_edge(0,0) G.add_edge(1,1) G.remove_nodes_from([0,1]) class BaseAttrGraphTester(BaseGraphTester): """ Tests of graph class attribute features.""" def test_weighted_degree(self): G=self.Graph() G.add_edge(1,2,weight=2,other=3) G.add_edge(2,3,weight=3,other=4) assert_equal(list(G.degree(weight='weight').values()),[2,5,3]) assert_equal(G.degree(weight='weight'),{1:2,2:5,3:3}) assert_equal(G.degree(1,weight='weight'),2) assert_equal(G.degree([1],weight='weight'),{1:2}) assert_equal(list(G.degree(weight='other').values()),[3,7,4]) assert_equal(G.degree(weight='other'),{1:3,2:7,3:4}) assert_equal(G.degree(1,weight='other'),3) assert_equal(G.degree([1],weight='other'),{1:3}) def add_attributes(self,G): G.graph['foo']=[] G.node[0]['foo']=[] G.remove_edge(1,2) ll=[] G.add_edge(1,2,foo=ll) G.add_edge(2,1,foo=ll) # attr_dict must be dict assert_raises(networkx.NetworkXError,G.add_edge,0,1,attr_dict=[]) def test_name(self): G=self.Graph(name='') assert_equal(G.name,"") G=self.Graph(name='test') assert_equal(G.__str__(),"test") assert_equal(G.name,"test") def test_copy(self): G=self.K3 self.add_attributes(G) H=G.copy() self.is_deepcopy(H,G) H=G.__class__(G) self.is_shallow_copy(H,G) def test_copy_attr(self): G=self.Graph(foo=[]) G.add_node(0,foo=[]) G.add_edge(1,2,foo=[]) H=G.copy() self.is_deepcopy(H,G) H=G.__class__(G) # just copy self.is_shallow_copy(H,G) def is_deepcopy(self,H,G): self.graphs_equal(H,G) self.different_attrdict(H,G) self.deep_copy_attrdict(H,G) def deep_copy_attrdict(self,H,G): self.deepcopy_graph_attr(H,G) self.deepcopy_node_attr(H,G) self.deepcopy_edge_attr(H,G) def deepcopy_graph_attr(self,H,G): assert_equal(G.graph['foo'],H.graph['foo']) G.graph['foo'].append(1) assert_not_equal(G.graph['foo'],H.graph['foo']) def deepcopy_node_attr(self,H,G): assert_equal(G.node[0]['foo'],H.node[0]['foo']) G.node[0]['foo'].append(1) assert_not_equal(G.node[0]['foo'],H.node[0]['foo']) def deepcopy_edge_attr(self,H,G): assert_equal(G[1][2]['foo'],H[1][2]['foo']) G[1][2]['foo'].append(1) assert_not_equal(G[1][2]['foo'],H[1][2]['foo']) def is_shallow_copy(self,H,G): self.graphs_equal(H,G) self.different_attrdict(H,G) self.shallow_copy_attrdict(H,G) def shallow_copy_attrdict(self,H,G): self.shallow_copy_graph_attr(H,G) self.shallow_copy_node_attr(H,G) self.shallow_copy_edge_attr(H,G) def shallow_copy_graph_attr(self,H,G): assert_equal(G.graph['foo'],H.graph['foo']) G.graph['foo'].append(1) assert_equal(G.graph['foo'],H.graph['foo']) def shallow_copy_node_attr(self,H,G): assert_equal(G.node[0]['foo'],H.node[0]['foo']) G.node[0]['foo'].append(1) assert_equal(G.node[0]['foo'],H.node[0]['foo']) def shallow_copy_edge_attr(self,H,G): assert_equal(G[1][2]['foo'],H[1][2]['foo']) G[1][2]['foo'].append(1) assert_equal(G[1][2]['foo'],H[1][2]['foo']) def same_attrdict(self, H, G): old_foo=H[1][2]['foo'] H.add_edge(1,2,foo='baz') assert_equal(G.edge,H.edge) H.add_edge(1,2,foo=old_foo) assert_equal(G.edge,H.edge) old_foo=H.node[0]['foo'] H.node[0]['foo']='baz' assert_equal(G.node,H.node) H.node[0]['foo']=old_foo assert_equal(G.node,H.node) def different_attrdict(self, H, G): old_foo=H[1][2]['foo'] H.add_edge(1,2,foo='baz') assert_not_equal(G.edge,H.edge) H.add_edge(1,2,foo=old_foo) assert_equal(G.edge,H.edge) old_foo=H.node[0]['foo'] H.node[0]['foo']='baz' assert_not_equal(G.node,H.node) H.node[0]['foo']=old_foo assert_equal(G.node,H.node) def graphs_equal(self,H,G): assert_equal(G.adj,H.adj) assert_equal(G.edge,H.edge) 
assert_equal(G.node,H.node) assert_equal(G.graph,H.graph) assert_equal(G.name,H.name) if not G.is_directed() and not H.is_directed(): assert_true(H.adj[1][2] is H.adj[2][1]) assert_true(G.adj[1][2] is G.adj[2][1]) else: # at least one is directed if not G.is_directed(): G.pred=G.adj G.succ=G.adj if not H.is_directed(): H.pred=H.adj H.succ=H.adj assert_equal(G.pred,H.pred) assert_equal(G.succ,H.succ) assert_true(H.succ[1][2] is H.pred[2][1]) assert_true(G.succ[1][2] is G.pred[2][1]) def test_graph_attr(self): G=self.K3 G.graph['foo']='bar' assert_equal(G.graph['foo'], 'bar') del G.graph['foo'] assert_equal(G.graph, {}) H=self.Graph(foo='bar') assert_equal(H.graph['foo'], 'bar') def test_node_attr(self): G=self.K3 G.add_node(1,foo='bar') assert_equal(G.nodes(), [0,1,2]) assert_equal(G.nodes(data=True), [(0,{}),(1,{'foo':'bar'}),(2,{})]) G.node[1]['foo']='baz' assert_equal(G.nodes(data=True), [(0,{}),(1,{'foo':'baz'}),(2,{})]) def test_node_attr2(self): G=self.K3 a={'foo':'bar'} G.add_node(3,attr_dict=a) assert_equal(G.nodes(), [0,1,2,3]) assert_equal(G.nodes(data=True), [(0,{}),(1,{}),(2,{}),(3,{'foo':'bar'})]) def test_edge_attr(self): G=self.Graph() G.add_edge(1,2,foo='bar') assert_equal(G.edges(data=True), [(1,2,{'foo':'bar'})]) def test_edge_attr2(self): G=self.Graph() G.add_edges_from([(1,2),(3,4)],foo='foo') assert_equal(sorted(G.edges(data=True)), [(1,2,{'foo':'foo'}),(3,4,{'foo':'foo'})]) def test_edge_attr3(self): G=self.Graph() G.add_edges_from([(1,2,{'weight':32}),(3,4,{'weight':64})],foo='foo') assert_equal(G.edges(data=True), [(1,2,{'foo':'foo','weight':32}),\ (3,4,{'foo':'foo','weight':64})]) G.remove_edges_from([(1,2),(3,4)]) G.add_edge(1,2,data=7,spam='bar',bar='foo') assert_equal(G.edges(data=True), [(1,2,{'data':7,'spam':'bar','bar':'foo'})]) def test_edge_attr4(self): G=self.Graph() G.add_edge(1,2,data=7,spam='bar',bar='foo') assert_equal(G.edges(data=True), [(1,2,{'data':7,'spam':'bar','bar':'foo'})]) G[1][2]['data']=10 # OK to set data like this assert_equal(G.edges(data=True), [(1,2,{'data':10,'spam':'bar','bar':'foo'})]) G.edge[1][2]['data']=20 # another spelling, "edge" assert_equal(G.edges(data=True), [(1,2,{'data':20,'spam':'bar','bar':'foo'})]) G.edge[1][2]['listdata']=[20,200] G.edge[1][2]['weight']=20 assert_equal(G.edges(data=True), [(1,2,{'data':20,'spam':'bar', 'bar':'foo','listdata':[20,200],'weight':20})]) def test_attr_dict_not_dict(self): # attr_dict must be dict G=self.Graph() edges=[(1,2)] assert_raises(networkx.NetworkXError,G.add_edges_from,edges, attr_dict=[]) def test_to_undirected(self): G=self.K3 self.add_attributes(G) H=networkx.Graph(G) self.is_shallow_copy(H,G) H=G.to_undirected() self.is_deepcopy(H,G) def test_to_directed(self): G=self.K3 self.add_attributes(G) H=networkx.DiGraph(G) self.is_shallow_copy(H,G) H=G.to_directed() self.is_deepcopy(H,G) def test_subgraph(self): G=self.K3 self.add_attributes(G) H=G.subgraph([0,1,2,5]) # assert_equal(H.name, 'Subgraph of ('+G.name+')') H.name=G.name self.graphs_equal(H,G) self.same_attrdict(H,G) self.shallow_copy_attrdict(H,G) H=G.subgraph(0) assert_equal(H.adj,{0:{}}) H=G.subgraph([]) assert_equal(H.adj,{}) assert_not_equal(G.adj,{}) def test_selfloops_attr(self): G=self.K3.copy() G.add_edge(0,0) G.add_edge(1,1,weight=2) assert_equal(G.selfloop_edges(data=True), [(0,0,{}),(1,1,{'weight':2})]) class TestGraph(BaseAttrGraphTester): """Tests specific to dict-of-dict-of-dict graph data structure""" def setUp(self): self.Graph=networkx.Graph # build dict-of-dict-of-dict K3 ed1,ed2,ed3 = ({},{},{}) 
self.k3adj={0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} self.k3edges=[(0, 1), (0, 2), (1, 2)] self.k3nodes=[0, 1, 2] self.K3=self.Graph() self.K3.adj=self.K3.edge=self.k3adj self.K3.node={} self.K3.node[0]={} self.K3.node[1]={} self.K3.node[2]={} def test_data_input(self): G=self.Graph(data={1:[2],2:[1]}, name="test") assert_equal(G.name,"test") assert_equal(sorted(G.adj.items()),[(1, {2: {}}), (2, {1: {}})]) G=self.Graph({1:[2],2:[1]}, name="test") assert_equal(G.name,"test") assert_equal(sorted(G.adj.items()),[(1, {2: {}}), (2, {1: {}})]) def test_adjacency_iter(self): G=self.K3 assert_equal(dict(G.adjacency_iter()), {0: {1: {}, 2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}}) def test_getitem(self): G=self.K3 assert_equal(G[0],{1: {}, 2: {}}) assert_raises(KeyError, G.__getitem__, 'j') assert_raises((TypeError,networkx.NetworkXError), G.__getitem__, ['A']) def test_add_node(self): G=self.Graph() G.add_node(0) assert_equal(G.adj,{0:{}}) # test add attributes G.add_node(1,c='red') G.add_node(2,{'c':'blue'}) G.add_node(3,{'c':'blue'},c='red') assert_raises(networkx.NetworkXError, G.add_node, 4, []) assert_raises(networkx.NetworkXError, G.add_node, 4, 4) assert_equal(G.node[1]['c'],'red') assert_equal(G.node[2]['c'],'blue') assert_equal(G.node[3]['c'],'red') # test updating attributes G.add_node(1,c='blue') G.add_node(2,{'c':'red'}) G.add_node(3,{'c':'red'},c='blue') assert_equal(G.node[1]['c'],'blue') assert_equal(G.node[2]['c'],'red') assert_equal(G.node[3]['c'],'blue') def test_add_nodes_from(self): G=self.Graph() G.add_nodes_from([0,1,2]) assert_equal(G.adj,{0:{},1:{},2:{}}) # test add attributes G.add_nodes_from([0,1,2],c='red') assert_equal(G.node[0]['c'],'red') assert_equal(G.node[2]['c'],'red') # test that attribute dicts are not the same assert(G.node[0] is not G.node[1]) # test updating attributes G.add_nodes_from([0,1,2],c='blue') assert_equal(G.node[0]['c'],'blue') assert_equal(G.node[2]['c'],'blue') assert(G.node[0] is not G.node[1]) # test tuple input H=self.Graph() H.add_nodes_from(G.nodes(data=True)) assert_equal(H.node[0]['c'],'blue') assert_equal(H.node[2]['c'],'blue') assert(H.node[0] is not H.node[1]) # specific overrides general H.add_nodes_from([0,(1,{'c':'green'}),(3,{'c':'cyan'})],c='red') assert_equal(H.node[0]['c'],'red') assert_equal(H.node[1]['c'],'green') assert_equal(H.node[2]['c'],'blue') assert_equal(H.node[3]['c'],'cyan') def test_remove_node(self): G=self.K3 G.remove_node(0) assert_equal(G.adj,{1:{2:{}},2:{1:{}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_node,-1) # generator here to implement list,set,string... 
def test_remove_nodes_from(self): G=self.K3 G.remove_nodes_from([0,1]) assert_equal(G.adj,{2:{}}) G.remove_nodes_from([-1]) # silent fail def test_add_edge(self): G=self.Graph() G.add_edge(0,1) assert_equal(G.adj,{0: {1: {}}, 1: {0: {}}}) G=self.Graph() G.add_edge(*(0,1)) assert_equal(G.adj,{0: {1: {}}, 1: {0: {}}}) def test_add_edges_from(self): G=self.Graph() G.add_edges_from([(0,1),(0,2,{'weight':3})]) assert_equal(G.adj,{0: {1:{}, 2:{'weight':3}}, 1: {0:{}}, \ 2:{0:{'weight':3}}}) G=self.Graph() G.add_edges_from([(0,1),(0,2,{'weight':3}),(1,2,{'data':4})],data=2) assert_equal(G.adj,{\ 0: {1:{'data':2}, 2:{'weight':3,'data':2}}, \ 1: {0:{'data':2}, 2:{'data':4}}, \ 2: {0:{'weight':3,'data':2}, 1:{'data':4}} \ }) assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,)]) # too few in tuple assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,1,2,3)]) # too many in tuple assert_raises(TypeError, G.add_edges_from,[0]) # not a tuple def test_remove_edge(self): G=self.K3 G.remove_edge(0,1) assert_equal(G.adj,{0:{2:{}},1:{2:{}},2:{0:{},1:{}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0) def test_remove_edges_from(self): G=self.K3 G.remove_edges_from([(0,1)]) assert_equal(G.adj,{0:{2:{}},1:{2:{}},2:{0:{},1:{}}}) G.remove_edges_from([(0,0)]) # silent fail def test_clear(self): G=self.K3 G.clear() assert_equal(G.adj,{}) def test_edges_data(self): G=self.K3 assert_equal(sorted(G.edges(data=True)),[(0,1,{}),(0,2,{}),(1,2,{})]) assert_equal(sorted(G.edges(0,data=True)),[(0,1,{}),(0,2,{})]) assert_raises((KeyError,networkx.NetworkXError), G.edges,-1) def test_get_edge_data(self): G=self.K3 assert_equal(G.get_edge_data(0,1),{}) assert_equal(G[0][1],{}) assert_equal(G.get_edge_data(10,20),None) assert_equal(G.get_edge_data(-1,0),None) assert_equal(G.get_edge_data(-1,0,default=1),1) networkx-1.8.1/networkx/classes/tests/test_multigraph.py0000664000175000017500000002060612177456333023546 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx from test_graph import BaseAttrGraphTester, TestGraph class BaseMultiGraphTester(BaseAttrGraphTester): def test_has_edge(self): G=self.K3 assert_equal(G.has_edge(0,1),True) assert_equal(G.has_edge(0,-1),False) assert_equal(G.has_edge(0,1,0),True) assert_equal(G.has_edge(0,1,1),False) def test_get_edge_data(self): G=self.K3 assert_equal(G.get_edge_data(0,1),{0:{}}) assert_equal(G[0][1],{0:{}}) assert_equal(G[0][1][0],{}) assert_equal(G.get_edge_data(10,20),None) assert_equal(G.get_edge_data(0,1,0),{}) def test_adjacency_iter(self): G=self.K3 assert_equal(dict(G.adjacency_iter()), {0: {1: {0:{}}, 2: {0:{}}}, 1: {0: {0:{}}, 2: {0:{}}}, 2: {0: {0:{}}, 1: {0:{}}}}) def deepcopy_edge_attr(self,H,G): assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) G[1][2][0]['foo'].append(1) assert_not_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) def shallow_copy_edge_attr(self,H,G): assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) G[1][2][0]['foo'].append(1) assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) def same_attrdict(self, H, G): # same attrdict in the edgedata old_foo=H[1][2][0]['foo'] H.add_edge(1,2,0,foo='baz') assert_equal(G.edge,H.edge) H.add_edge(1,2,0,foo=old_foo) assert_equal(G.edge,H.edge) # but not same edgedata dict H.add_edge(1,2,foo='baz') assert_not_equal(G.edge,H.edge) old_foo=H.node[0]['foo'] H.node[0]['foo']='baz' assert_equal(G.node,H.node) H.node[0]['foo']=old_foo assert_equal(G.node,H.node) def different_attrdict(self, H, G): # used by graph_equal_but_different old_foo=H[1][2][0]['foo'] 
H.add_edge(1,2,0,foo='baz') assert_not_equal(G.edge,H.edge) H.add_edge(1,2,0,foo=old_foo) assert_equal(G.edge,H.edge) HH=H.copy() H.add_edge(1,2,foo='baz') assert_not_equal(G.edge,H.edge) H=HH old_foo=H.node[0]['foo'] H.node[0]['foo']='baz' assert_not_equal(G.node,H.node) H.node[0]['foo']=old_foo assert_equal(G.node,H.node) def test_to_undirected(self): G=self.K3 self.add_attributes(G) H=networkx.MultiGraph(G) self.is_shallow_copy(H,G) H=G.to_undirected() self.is_deepcopy(H,G) def test_to_directed(self): G=self.K3 self.add_attributes(G) H=networkx.MultiDiGraph(G) self.is_shallow_copy(H,G) H=G.to_directed() self.is_deepcopy(H,G) def test_selfloops(self): G=self.K3 G.add_edge(0,0) assert_equal(G.nodes_with_selfloops(),[0]) assert_equal(G.selfloop_edges(),[(0,0)]) assert_equal(G.selfloop_edges(data=True),[(0,0,{})]) assert_equal(G.number_of_selfloops(),1) def test_selfloops2(self): G=self.K3 G.add_edge(0,0) G.add_edge(0,0) G.add_edge(0,0,key='parallel edge') G.remove_edge(0,0,key='parallel edge') assert_equal(G.number_of_edges(0,0),2) G.remove_edge(0,0) assert_equal(G.number_of_edges(0,0),1) def test_edge_attr4(self): G=self.Graph() G.add_edge(1,2,key=0,data=7,spam='bar',bar='foo') assert_equal(G.edges(data=True), [(1,2,{'data':7,'spam':'bar','bar':'foo'})]) G[1][2][0]['data']=10 # OK to set data like this assert_equal(G.edges(data=True), [(1,2,{'data':10,'spam':'bar','bar':'foo'})]) G.edge[1][2][0]['data']=20 # another spelling, "edge" assert_equal(G.edges(data=True), [(1,2,{'data':20,'spam':'bar','bar':'foo'})]) G.edge[1][2][0]['listdata']=[20,200] G.edge[1][2][0]['weight']=20 assert_equal(G.edges(data=True), [(1,2,{'data':20,'spam':'bar', 'bar':'foo','listdata':[20,200],'weight':20})]) class TestMultiGraph(BaseMultiGraphTester,TestGraph): def setUp(self): self.Graph=networkx.MultiGraph # build K3 ed1,ed2,ed3 = ({0:{}},{0:{}},{0:{}}) self.k3adj={0: {1: ed1, 2: ed2}, 1: {0: ed1, 2: ed3}, 2: {0: ed2, 1: ed3}} self.k3edges=[(0, 1), (0, 2), (1, 2)] self.k3nodes=[0, 1, 2] self.K3=self.Graph() self.K3.adj = self.K3.edge = self.k3adj self.K3.node={} self.K3.node[0]={} self.K3.node[1]={} self.K3.node[2]={} def test_data_input(self): G=self.Graph(data={1:[2],2:[1]}, name="test") assert_equal(G.name,"test") assert_equal(sorted(G.adj.items()),[(1, {2: {0:{}}}), (2, {1: {0:{}}})]) def test_getitem(self): G=self.K3 assert_equal(G[0],{1: {0:{}}, 2: {0:{}}}) assert_raises(KeyError, G.__getitem__, 'j') assert_raises((TypeError,networkx.NetworkXError), G.__getitem__, ['A']) def test_remove_node(self): G=self.K3 G.remove_node(0) assert_equal(G.adj,{1:{2:{0:{}}},2:{1:{0:{}}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_node,-1) def test_add_edge(self): G=self.Graph() G.add_edge(0,1) assert_equal(G.adj,{0: {1: {0:{}}}, 1: {0: {0:{}}}}) G=self.Graph() G.add_edge(*(0,1)) assert_equal(G.adj,{0: {1: {0:{}}}, 1: {0: {0:{}}}}) def test_add_edge_conflicting_key(self): G=self.Graph() G.add_edge(0,1,key=1) G.add_edge(0,1) assert_equal(G.number_of_edges(),2) G=self.Graph() G.add_edges_from([(0,1,1,{})]) G.add_edges_from([(0,1)]) assert_equal(G.number_of_edges(),2) def test_add_edges_from(self): G=self.Graph() G.add_edges_from([(0,1),(0,1,{'weight':3})]) assert_equal(G.adj,{0: {1: {0:{},1:{'weight':3}}}, 1: {0: {0:{},1:{'weight':3}}}}) G.add_edges_from([(0,1),(0,1,{'weight':3})],weight=2) assert_equal(G.adj,{0: {1: {0:{},1:{'weight':3}, 2:{'weight':2},3:{'weight':3}}}, 1: {0: {0:{},1:{'weight':3}, 2:{'weight':2},3:{'weight':3}}}}) # too few in tuple assert_raises(networkx.NetworkXError, 
G.add_edges_from,[(0,)]) # too many in tuple assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,1,2,3,4)]) assert_raises(TypeError, G.add_edges_from,[0]) # not a tuple def test_remove_edge(self): G=self.K3 G.remove_edge(0,1) assert_equal(G.adj,{0: {2: {0: {}}}, 1: {2: {0: {}}}, 2: {0: {0: {}}, 1: {0: {}}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,0,2, key=1) def test_remove_edges_from(self): G=self.K3.copy() G.remove_edges_from([(0,1)]) assert_equal(G.adj,{0:{2:{0:{}}},1:{2:{0:{}}},2:{0:{0:{}},1:{0:{}}}}) G.remove_edges_from([(0,0)]) # silent fail self.K3.add_edge(0,1) G=self.K3.copy() G.remove_edges_from(G.edges(data=True,keys=True)) assert_equal(G.adj,{0:{},1:{},2:{}}) G=self.K3.copy() G.remove_edges_from(G.edges(data=False,keys=True)) assert_equal(G.adj,{0:{},1:{},2:{}}) G=self.K3.copy() G.remove_edges_from(G.edges(data=False,keys=False)) assert_equal(G.adj,{0:{},1:{},2:{}}) G=self.K3.copy() G.remove_edges_from([(0,1,0),(0,2,0,{}),(1,2)]) assert_equal(G.adj,{0:{1:{1:{}}},1:{0:{1:{}}},2:{}}) def test_remove_multiedge(self): G=self.K3 G.add_edge(0,1,key='parallel edge') G.remove_edge(0,1,key='parallel edge') assert_equal(G.adj,{0: {1: {0:{}}, 2: {0:{}}}, 1: {0: {0:{}}, 2: {0:{}}}, 2: {0: {0:{}}, 1: {0:{}}}}) G.remove_edge(0,1) assert_equal(G.adj,{0:{2:{0:{}}},1:{2:{0:{}}},2:{0:{0:{}},1:{0:{}}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0) networkx-1.8.1/networkx/classes/tests/test_graph_historical.py0000664000175000017500000000044712177456333024715 0ustar aricaric00000000000000#!/usr/bin/env python """Original NetworkX graph tests""" from nose.tools import * import networkx import networkx as nx from historical_tests import HistoricalTests class TestGraphHistorical(HistoricalTests): def setUp(self): HistoricalTests.setUp(self) self.G=nx.Graph networkx-1.8.1/networkx/classes/tests/test_digraph_historical.py0000664000175000017500000001045112177456333025226 0ustar aricaric00000000000000#!/usr/bin/env python """Original NetworkX graph tests""" from nose.tools import * import networkx import networkx as nx from historical_tests import HistoricalTests class TestDiGraphHistorical(HistoricalTests): def setUp(self): HistoricalTests.setUp(self) self.G=nx.DiGraph def test_in_degree(self): G=self.G() G.add_nodes_from('GJK') G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('B', 'C'), ('C', 'D')]) assert_equal(sorted(G.in_degree().values()),[0, 0, 0, 0, 1, 2, 2]) assert_equal(G.in_degree(), {'A': 0, 'C': 2, 'B': 1, 'D': 2, 'G': 0, 'K': 0, 'J': 0}) assert_equal(sorted([v for k,v in G.in_degree_iter()]), [0, 0, 0, 0, 1, 2, 2]) assert_equal(dict(G.in_degree_iter()), {'A': 0, 'C': 2, 'B': 1, 'D': 2, 'G': 0, 'K': 0, 'J': 0}) def test_out_degree(self): G=self.G() G.add_nodes_from('GJK') G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('B', 'C'), ('C', 'D')]) assert_equal(sorted(G.out_degree().values()),[0, 0, 0, 0, 1, 2, 2]) assert_equal(G.out_degree(), {'A': 2, 'C': 1, 'B': 2, 'D': 0, 'G': 0, 'K': 0, 'J': 0}) assert_equal(sorted([v for k,v in G.in_degree_iter()]), [0, 0, 0, 0, 1, 2, 2]) assert_equal(dict(G.out_degree_iter()), {'A': 2, 'C': 1, 'B': 2, 'D': 0, 'G': 0, 'K': 0, 'J': 0}) def test_degree_digraph(self): H=nx.DiGraph() H.add_edges_from([(1,24),(1,2)]) assert_equal(sorted(H.in_degree([1,24]).values()),[0, 1]) assert_equal(sorted(H.out_degree([1,24]).values()),[0, 2]) assert_equal(sorted(H.degree([1,24]).values()),[1, 2]) def test_neighbors(self): G=self.G() 
G.add_nodes_from('GJK') G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('B', 'C'), ('C', 'D')]) assert_equal(sorted(G.neighbors('C')),['D']) assert_equal(sorted(G['C']),['D']) assert_equal(sorted(G.neighbors('A')),['B', 'C']) assert_equal(sorted(G.neighbors_iter('A')),['B', 'C']) assert_equal(sorted(G.neighbors_iter('C')),['D']) assert_equal(sorted(G.neighbors('A')),['B', 'C']) assert_raises(nx.NetworkXError,G.neighbors,'j') assert_raises(nx.NetworkXError,G.neighbors_iter,'j') def test_successors(self): G=self.G() G.add_nodes_from('GJK') G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('B', 'C'), ('C', 'D')]) assert_equal(sorted(G.successors('A')),['B', 'C']) assert_equal(sorted(G.successors_iter('A')),['B', 'C']) assert_equal(sorted(G.successors('G')),[]) assert_equal(sorted(G.successors('D')),[]) assert_equal(sorted(G.successors_iter('G')),[]) assert_raises(nx.NetworkXError,G.successors,'j') assert_raises(nx.NetworkXError,G.successors_iter,'j') def test_predecessors(self): G=self.G() G.add_nodes_from('GJK') G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('B', 'C'), ('C', 'D')]) assert_equal(sorted(G.predecessors('C')),['A', 'B']) assert_equal(sorted(G.predecessors_iter('C')),['A', 'B']) assert_equal(sorted(G.predecessors('G')),[]) assert_equal(sorted(G.predecessors('A')),[]) assert_equal(sorted(G.predecessors_iter('G')),[]) assert_equal(sorted(G.predecessors_iter('A')),[]) assert_equal(sorted(G.successors_iter('D')),[]) assert_raises(nx.NetworkXError,G.predecessors,'j') assert_raises(nx.NetworkXError,G.predecessors,'j') def test_reverse(self): G=nx.complete_graph(10) H=G.to_directed() HR=H.reverse() assert_true(nx.is_isomorphic(H,HR)) assert_equal(sorted(H.edges()),sorted(HR.edges())) def test_reverse2(self): H=nx.DiGraph() foo=[H.add_edge(u,u+1) for u in range(0,5)] HR=H.reverse() for u in range(0,5): assert_true(HR.has_edge(u+1,u)) def test_reverse3(self): H=nx.DiGraph() H.add_nodes_from([1,2,3,4]) HR=H.reverse() assert_equal(sorted(HR.nodes()),[1, 2, 3, 4]) networkx-1.8.1/networkx/classes/tests/historical_tests.py0000664000175000017500000004247512177456333023726 0ustar aricaric00000000000000#!/usr/bin/env python """Original NetworkX graph tests""" from nose.tools import * import networkx import networkx as nx from networkx import convert_node_labels_to_integers as cnlti from networkx.testing import * class HistoricalTests(object): def setUp(self): self.null=nx.null_graph() self.P1=cnlti(nx.path_graph(1),first_label=1) self.P3=cnlti(nx.path_graph(3),first_label=1) self.P10=cnlti(nx.path_graph(10),first_label=1) self.K1=cnlti(nx.complete_graph(1),first_label=1) self.K3=cnlti(nx.complete_graph(3),first_label=1) self.K4=cnlti(nx.complete_graph(4),first_label=1) self.K5=cnlti(nx.complete_graph(5),first_label=1) self.K10=cnlti(nx.complete_graph(10),first_label=1) self.G=nx.Graph def test_name(self): G=self.G(name="test") assert_equal(str(G),'test') assert_equal(G.name,'test') H=self.G() assert_equal(H.name,'') # Nodes def test_add_remove_node(self): G=self.G() G.add_node('A') assert_true(G.has_node('A')) G.remove_node('A') assert_false(G.has_node('A')) def test_nonhashable_node(self): # Test if a non-hashable object is in the Graph. A python dict will # raise a TypeError, but for a Graph class a simple False should be # returned (see Graph __contains__). If it cannot be a node then it is # not a node. 
G=self.G() assert_false(G.has_node(['A'])) assert_false(G.has_node({'A':1})) def test_add_nodes_from(self): G=self.G() G.add_nodes_from(list("ABCDEFGHIJKL")) assert_true(G.has_node("L")) G.remove_nodes_from(['H','I','J','K','L']) G.add_nodes_from([1,2,3,4]) assert_equal(sorted(G.nodes(),key=str), [1, 2, 3, 4, 'A', 'B', 'C', 'D', 'E', 'F', 'G']) # test __iter__ assert_equal(sorted(G,key=str), [1, 2, 3, 4, 'A', 'B', 'C', 'D', 'E', 'F', 'G']) def test_contains(self): G=self.G() G.add_node('A') assert_true('A' in G) assert_false([] in G) # never raise a Key or TypeError in this test assert_false({1:1} in G) def test_add_remove(self): # Test add_node and remove_node acting for various nbunch G=self.G() G.add_node('m') assert_true(G.has_node('m')) G.add_node('m') # no complaints assert_raises(nx.NetworkXError,G.remove_node,'j') G.remove_node('m') assert_equal(G.nodes(),[]) def test_nbunch_is_list(self): G=self.G() G.add_nodes_from(list("ABCD")) G.add_nodes_from(self.P3) # add nbunch of nodes (nbunch=Graph) assert_equal(sorted(G.nodes(),key=str), [1, 2, 3, 'A', 'B', 'C', 'D']) G.remove_nodes_from(self.P3) # remove nbunch of nodes (nbunch=Graph) assert_equal(sorted(G.nodes(),key=str), ['A', 'B', 'C', 'D']) def test_nbunch_is_set(self): G=self.G() nbunch=set("ABCDEFGHIJKL") G.add_nodes_from(nbunch) assert_true(G.has_node("L")) def test_nbunch_dict(self): # nbunch is a dict with nodes as keys G=self.G() nbunch=set("ABCDEFGHIJKL") G.add_nodes_from(nbunch) nbunch={'I':"foo",'J':2,'K':True,'L':"spam"} G.remove_nodes_from(nbunch) assert_true(sorted(G.nodes(),key=str), ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) def test_nbunch_iterator(self): G=self.G() G.add_nodes_from(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) n_iter=self.P3.nodes_iter() G.add_nodes_from(n_iter) assert_equal(sorted(G.nodes(),key=str), [1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) n_iter=self.P3.nodes_iter() # rebuild same iterator G.remove_nodes_from(n_iter) # remove nbunch of nodes (nbunch=iterator) assert_equal(sorted(G.nodes(),key=str), ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) def test_nbunch_graph(self): G=self.G() G.add_nodes_from(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) nbunch=self.K3 G.add_nodes_from(nbunch) assert_true(sorted(G.nodes(),key=str), [1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) # Edges def test_add_edge(self): G=self.G() assert_raises(TypeError,G.add_edge,'A') G.add_edge('A','B') # testing add_edge() G.add_edge('A','B') # should fail silently assert_true(G.has_edge('A','B')) assert_false(G.has_edge('A','C')) assert_true(G.has_edge( *('A','B') )) if G.is_directed(): assert_false(G.has_edge('B','A')) else: # G is undirected, so B->A is an edge assert_true(G.has_edge('B','A')) G.add_edge('A','C') # test directedness G.add_edge('C','A') G.remove_edge('C','A') if G.is_directed(): assert_true(G.has_edge('A','C')) else: assert_false(G.has_edge('A','C')) assert_false(G.has_edge('C','A')) def test_self_loop(self): G=self.G() G.add_edge('A','A') # test self loops assert_true(G.has_edge('A','A')) G.remove_edge('A','A') G.add_edge('X','X') assert_true(G.has_node('X')) G.remove_node('X') G.add_edge('A','Z') # should add the node silently assert_true(G.has_node('Z')) def test_add_edges_from(self): G=self.G() G.add_edges_from([('B','C')]) # test add_edges_from() assert_true(G.has_edge('B','C')) if G.is_directed(): assert_false(G.has_edge('C','B')) else: assert_true(G.has_edge('C','B')) # undirected G.add_edges_from([('D','F'),('B','D')]) assert_true(G.has_edge('D','F')) assert_true(G.has_edge('B','D')) if 
G.is_directed(): assert_false(G.has_edge('D','B')) else: assert_true(G.has_edge('D','B')) # undirected def test_add_edges_from2(self): G=self.G() # after failing silently, should add 2nd edge G.add_edges_from([tuple('IJ'),list('KK'),tuple('JK')]) assert_true(G.has_edge(*('I','J'))) assert_true(G.has_edge(*('K','K'))) assert_true(G.has_edge(*('J','K'))) if G.is_directed(): assert_false(G.has_edge(*('K','J'))) else: assert_true(G.has_edge(*('K','J'))) def test_add_edges_from3(self): G=self.G() G.add_edges_from(zip(list('ACD'),list('CDE'))) assert_true(G.has_edge('D','E')) assert_false(G.has_edge('E','C')) def test_remove_edge(self): G=self.G() G.add_nodes_from([1, 2, 3, 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']) G.add_edges_from(zip(list('MNOP'),list('NOPM'))) assert_true(G.has_edge('O','P')) assert_true( G.has_edge('P','M')) G.remove_node('P') # tests remove_node()'s handling of edges. assert_false(G.has_edge('P','M')) assert_raises(TypeError,G.remove_edge,'M') G.add_edge('N','M') assert_true(G.has_edge('M','N')) G.remove_edge('M','N') assert_false(G.has_edge('M','N')) # self loop fails silently G.remove_edges_from([list('HI'),list('DF'), tuple('KK'),tuple('JK')]) assert_false(G.has_edge('H','I')) assert_false(G.has_edge('J','K')) G.remove_edges_from([list('IJ'),list('KK'),list('JK')]) assert_false(G.has_edge('I','J')) G.remove_nodes_from(set('ZEFHIMNO')) G.add_edge('J','K') def test_edges_nbunch(self): # Test G.edges(nbunch) with various forms of nbunch G=self.G() G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) # node not in nbunch should be quietly ignored assert_raises(nx.NetworkXError,G.edges,6) assert_equals(G.edges('Z'),[]) # iterable non-node # nbunch can be an empty list assert_equals(G.edges([]),[]) if G.is_directed(): elist=[('A', 'B'), ('A', 'C'), ('B', 'D')] else: elist=[('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')] # nbunch can be a list assert_edges_equal(G.edges(['A','B']),elist) # nbunch can be a set assert_edges_equal(G.edges(set(['A','B'])),elist) # nbunch can be a graph G1=self.G() G1.add_nodes_from('AB') assert_edges_equal(G.edges(G1),elist) # nbunch can be a dict with nodes as keys ndict={'A': "thing1", 'B': "thing2"} assert_edges_equal(G.edges(ndict),elist) # nbunch can be a single node assert_edges_equal(G.edges('A'), [('A', 'B'), ('A', 'C')]) assert_edges_equal(G.nodes_iter(), ['A', 'B', 'C', 'D']) def test_edges_iter_nbunch(self): G=self.G() G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) # Test G.edges_iter(nbunch) with various forms of nbunch # node not in nbunch should be quietly ignored assert_equals(list(G.edges_iter('Z')),[]) # nbunch can be an empty list assert_equals(sorted(G.edges_iter([])),[]) if G.is_directed(): elist=[('A', 'B'), ('A', 'C'), ('B', 'D')] else: elist=[('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')] # nbunch can be a list assert_edges_equal(G.edges_iter(['A','B']),elist) # nbunch can be a set assert_edges_equal(G.edges_iter(set(['A','B'])),elist) # nbunch can be a graph G1=self.G() G1.add_nodes_from(['A','B']) assert_edges_equal(G.edges_iter(G1),elist) # nbunch can be a dict with nodes as keys ndict={'A': "thing1", 'B': "thing2"} assert_edges_equal(G.edges_iter(ndict),elist) # nbunch can be a single node assert_edges_equal(G.edges_iter('A'), [('A', 'B'), ('A', 'C')]) # nbunch can be nothing (whole graph) assert_edges_equal(G.edges_iter(), [('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) def test_degree(self): G=self.G() G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 
'D'), ('C', 'B'), ('C', 'D')]) assert_equal(G.degree('A'),2) # degree of single node in iterable container must return dict assert_equal(list(G.degree(['A']).values()),[2]) assert_equal(G.degree(['A']),{'A': 2}) assert_equal(sorted(G.degree(['A','B']).values()),[2, 3]) assert_equal(G.degree(['A','B']),{'A': 2, 'B': 3}) assert_equal(sorted(G.degree().values()),[2, 2, 3, 3]) assert_equal(sorted([v for k,v in G.degree_iter()]), [2, 2, 3, 3]) def test_degree2(self): H=self.G() H.add_edges_from([(1,24),(1,2)]) assert_equal(sorted(H.degree([1,24]).values()),[1, 2]) def test_degree_graph(self): P3=nx.path_graph(3) P5=nx.path_graph(5) # silently ignore nodes not in P3 assert_equal(P3.degree(['A','B']),{}) # nbunch can be a graph assert_equal(sorted(P5.degree(P3).values()),[1, 2, 2]) # nbunch can be a graph thats way to big assert_equal(sorted(P3.degree(P5).values()),[1, 1, 2]) assert_equal(P5.degree([]),{}) assert_equal(list(P5.degree_iter([])),[]) assert_equal(dict(P5.degree_iter([])),{}) def test_null(self): null=nx.null_graph() assert_equal(null.degree(),{}) assert_equal(list(null.degree_iter()),[]) assert_equal(dict(null.degree_iter()),{}) def test_order_size(self): G=self.G() G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) assert_equal(G.order(),4) assert_equal(G.size(),5) assert_equal(G.number_of_edges(),5) assert_equal(G.number_of_edges('A','B'),1) assert_equal(G.number_of_edges('A','D'),0) def test_copy(self): G=self.G() H=G.copy() # copy assert_equal(H.adj,G.adj) assert_equal(H.name,G.name) assert_not_equal(H,G) def test_subgraph(self): G=self.G() G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) SG=G.subgraph(['A','B','D']) assert_nodes_equal(SG.nodes(),['A', 'B', 'D']) assert_edges_equal(SG.edges(),[('A', 'B'), ('B', 'D')]) def test_to_directed(self): G=self.G() if not G.is_directed(): G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) DG=G.to_directed() assert_not_equal(DG,G) # directed copy or copy assert_true(DG.is_directed()) assert_equal(DG.name,G.name) assert_equal(DG.adj,G.adj) assert_equal(sorted(DG.out_edges(list('AB'))), [('A', 'B'), ('A', 'C'), ('B', 'A'), ('B', 'C'), ('B', 'D')]) DG.remove_edge('A','B') assert_true(DG.has_edge('B','A')) # this removes B-A but not A-B assert_false(DG.has_edge('A','B')) def test_to_undirected(self): G=self.G() if G.is_directed(): G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) UG=G.to_undirected() # to_undirected assert_not_equal(UG,G) assert_false(UG.is_directed()) assert_true(G.is_directed()) assert_equal(UG.name,G.name) assert_not_equal(UG.adj,G.adj) assert_equal(sorted(UG.edges(list('AB'))), [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')]) assert_equal(sorted(UG.edges(['A','B'])), [('A', 'B'), ('A', 'C'), ('B', 'C'), ('B', 'D')]) UG.remove_edge('A','B') assert_false(UG.has_edge('B','A')) assert_false( UG.has_edge('A','B')) def test_neighbors(self): G=self.G() G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) G.add_nodes_from('GJK') assert_equal(sorted(G['A']),['B', 'C']) assert_equal(sorted(G.neighbors('A')),['B', 'C']) assert_equal(sorted(G.neighbors_iter('A')),['B', 'C']) assert_equal(sorted(G.neighbors('G')),[]) assert_raises(nx.NetworkXError,G.neighbors,'j') def test_iterators(self): G=self.G() G.add_edges_from([('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) G.add_nodes_from('GJK') assert_equal(sorted(G.nodes_iter()), ['A', 'B', 'C', 'D', 'G', 'J', 'K']) 
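        # Expected degrees below: A and D have degree 2, B and C have degree 3,
        # and the isolated nodes G, J, K have degree 0 (the totals are the same
        # for the Graph and DiGraph subclasses).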
assert_edges_equal(G.edges_iter(), [('A', 'B'), ('A', 'C'), ('B', 'D'), ('C', 'B'), ('C', 'D')]) assert_equal(sorted([v for k,v in G.degree_iter()]), [0, 0, 0, 2, 2, 3, 3]) assert_equal(sorted(G.degree_iter(),key=str), [('A', 2), ('B', 3), ('C', 3), ('D', 2), ('G', 0), ('J', 0), ('K', 0)]) assert_equal(sorted(G.neighbors_iter('A')),['B', 'C']) assert_raises(nx.NetworkXError,G.neighbors_iter,'X') G.clear() assert_equal(nx.number_of_nodes(G),0) assert_equal(nx.number_of_edges(G),0) def test_null_subgraph(self): # Subgraph of a null graph is a null graph nullgraph=nx.null_graph() G=nx.null_graph() H=G.subgraph([]) assert_true(nx.is_isomorphic(H,nullgraph)) def test_empty_subgraph(self): # Subgraph of an empty graph is an empty graph. test 1 nullgraph=nx.null_graph() E5=nx.empty_graph(5) E10=nx.empty_graph(10) H=E10.subgraph([]) assert_true(nx.is_isomorphic(H,nullgraph)) H=E10.subgraph([1,2,3,4,5]) assert_true(nx.is_isomorphic(H,E5)) def test_complete_subgraph(self): # Subgraph of a complete graph is a complete graph K1=nx.complete_graph(1) K3=nx.complete_graph(3) K5=nx.complete_graph(5) H=K5.subgraph([1,2,3]) assert_true(nx.is_isomorphic(H,K3)) def test_subgraph_nbunch(self): nullgraph=nx.null_graph() K1=nx.complete_graph(1) K3=nx.complete_graph(3) K5=nx.complete_graph(5) # Test G.subgraph(nbunch), where nbunch is a single node H=K5.subgraph(1) assert_true(nx.is_isomorphic(H,K1)) # Test G.subgraph(nbunch), where nbunch is a set H=K5.subgraph(set([1])) assert_true(nx.is_isomorphic(H,K1)) # Test G.subgraph(nbunch), where nbunch is an iterator H=K5.subgraph(iter(K3)) assert_true(nx.is_isomorphic(H,K3)) # Test G.subgraph(nbunch), where nbunch is another graph H=K5.subgraph(K3) assert_true(nx.is_isomorphic(H,K3)) H=K5.subgraph([9]) assert_true(nx.is_isomorphic(H,nullgraph)) def test_node_tuple_error(self): H=self.G() # Test error handling of tuple as a node assert_raises(nx.NetworkXError,H.remove_node,(1,2)) H.remove_nodes_from([(1,2)]) # no error assert_raises(nx.NetworkXError,H.neighbors,(1,2)) networkx-1.8.1/networkx/classes/tests/test_multidigraph.py0000664000175000017500000003064212177456333024064 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx from test_multigraph import BaseMultiGraphTester, TestMultiGraph class BaseMultiDiGraphTester(BaseMultiGraphTester): def test_edges(self): G=self.K3 assert_equal(sorted(G.edges()),[(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.edges(0)),[(0,1),(0,2)]) assert_raises((KeyError,networkx.NetworkXError), G.edges,-1) def test_edges_data(self): G=self.K3 assert_equal(sorted(G.edges(data=True)), [(0,1,{}),(0,2,{}),(1,0,{}),(1,2,{}),(2,0,{}),(2,1,{})]) assert_equal(sorted(G.edges(0,data=True)),[(0,1,{}),(0,2,{})]) assert_raises((KeyError,networkx.NetworkXError), G.neighbors,-1) def test_edges_iter(self): G=self.K3 assert_equal(sorted(G.edges_iter()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.edges_iter(0)),[(0,1),(0,2)]) G.add_edge(0,1) assert_equal(sorted(G.edges_iter()), [(0,1),(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) def test_out_edges(self): G=self.K3 assert_equal(sorted(G.out_edges()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.out_edges(0)),[(0,1),(0,2)]) assert_raises((KeyError,networkx.NetworkXError), G.out_edges,-1) assert_equal(sorted(G.out_edges(0,keys=True)),[(0,1,0),(0,2,0)]) def test_out_edges_iter(self): G=self.K3 assert_equal(sorted(G.out_edges_iter()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.out_edges_iter(0)),[(0,1),(0,2)]) 
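        # Add a parallel 0->1 edge under the explicit key 2; the iterator
        # should then yield (0, 1) twice.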
G.add_edge(0,1,2) assert_equal(sorted(G.out_edges_iter()), [(0,1),(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) def test_in_edges(self): G=self.K3 assert_equal(sorted(G.in_edges()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.in_edges(0)),[(1,0),(2,0)]) assert_raises((KeyError,networkx.NetworkXError), G.in_edges,-1) G.add_edge(0,1,2) assert_equal(sorted(G.in_edges()), [(0,1),(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.in_edges(0,keys=True)),[(1,0,0),(2,0,0)]) def test_in_edges_iter(self): G=self.K3 assert_equal(sorted(G.in_edges_iter()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.in_edges_iter(0)),[(1,0),(2,0)]) G.add_edge(0,1,2) assert_equal(sorted(G.in_edges_iter()), [(0,1),(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.in_edges_iter(data=True,keys=False)), [(0,1,{}),(0,1,{}),(0,2,{}),(1,0,{}),(1,2,{}), (2,0,{}),(2,1,{})]) def is_shallow(self,H,G): # graph assert_equal(G.graph['foo'],H.graph['foo']) G.graph['foo'].append(1) assert_equal(G.graph['foo'],H.graph['foo']) # node assert_equal(G.node[0]['foo'],H.node[0]['foo']) G.node[0]['foo'].append(1) assert_equal(G.node[0]['foo'],H.node[0]['foo']) # edge assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) G[1][2][0]['foo'].append(1) assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) def is_deep(self,H,G): # graph assert_equal(G.graph['foo'],H.graph['foo']) G.graph['foo'].append(1) assert_not_equal(G.graph['foo'],H.graph['foo']) # node assert_equal(G.node[0]['foo'],H.node[0]['foo']) G.node[0]['foo'].append(1) assert_not_equal(G.node[0]['foo'],H.node[0]['foo']) # edge assert_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) G[1][2][0]['foo'].append(1) assert_not_equal(G[1][2][0]['foo'],H[1][2][0]['foo']) def test_to_undirected(self): # MultiDiGraph -> MultiGraph changes number of edges so it is # not a copy operation... 
use is_shallow, not is_shallow_copy G=self.K3 self.add_attributes(G) H=networkx.MultiGraph(G) self.is_shallow(H,G) H=G.to_undirected() self.is_deep(H,G) def test_has_successor(self): G=self.K3 assert_equal(G.has_successor(0,1),True) assert_equal(G.has_successor(0,-1),False) def test_successors(self): G=self.K3 assert_equal(sorted(G.successors(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.successors,-1) def test_successors_iter(self): G=self.K3 assert_equal(sorted(G.successors_iter(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.successors_iter,-1) def test_has_predecessor(self): G=self.K3 assert_equal(G.has_predecessor(0,1),True) assert_equal(G.has_predecessor(0,-1),False) def test_predecessors(self): G=self.K3 assert_equal(sorted(G.predecessors(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.predecessors,-1) def test_predecessors_iter(self): G=self.K3 assert_equal(sorted(G.predecessors_iter(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.predecessors_iter,-1) def test_degree(self): G=self.K3 assert_equal(list(G.degree().values()),[4,4,4]) assert_equal(G.degree(),{0:4,1:4,2:4}) assert_equal(G.degree(0),4) assert_equal(G.degree([0]),{0:4}) assert_raises((KeyError,networkx.NetworkXError), G.degree,-1) def test_degree_iter(self): G=self.K3 assert_equal(list(G.degree_iter()),[(0,4),(1,4),(2,4)]) assert_equal(dict(G.degree_iter()),{0:4,1:4,2:4}) assert_equal(list(G.degree_iter(0)),[(0,4)]) G.add_edge(0,1,weight=0.3,other=1.2) assert_equal(list(G.degree_iter(weight='weight')),[(0,4.3),(1,4.3),(2,4)]) assert_equal(list(G.degree_iter(weight='other')),[(0,5.2),(1,5.2),(2,4)]) def test_in_degree(self): G=self.K3 assert_equal(list(G.in_degree().values()),[2,2,2]) assert_equal(G.in_degree(),{0:2,1:2,2:2}) assert_equal(G.in_degree(0),2) assert_equal(G.in_degree([0]),{0:2}) assert_raises((KeyError,networkx.NetworkXError), G.in_degree,-1) def test_in_degree_iter(self): G=self.K3 assert_equal(list(G.in_degree_iter()),[(0,2),(1,2),(2,2)]) assert_equal(dict(G.in_degree_iter()),{0:2,1:2,2:2}) assert_equal(list(G.in_degree_iter(0)),[(0,2)]) assert_equal(list(G.in_degree_iter(0,weight='weight')),[(0,2)]) def test_out_degree(self): G=self.K3 assert_equal(list(G.out_degree().values()),[2,2,2]) assert_equal(G.out_degree(),{0:2,1:2,2:2}) assert_equal(G.out_degree(0),2) assert_equal(G.out_degree([0]),{0:2}) assert_raises((KeyError,networkx.NetworkXError), G.out_degree,-1) def test_out_degree_iter(self): G=self.K3 assert_equal(list(G.out_degree_iter()),[(0,2),(1,2),(2,2)]) assert_equal(dict(G.out_degree_iter()),{0:2,1:2,2:2}) assert_equal(list(G.out_degree_iter(0)),[(0,2)]) assert_equal(list(G.out_degree_iter(0,weight='weight')),[(0,2)]) def test_size(self): G=self.K3 assert_equal(G.size(),6) assert_equal(G.number_of_edges(),6) G.add_edge(0,1,weight=0.3,other=1.2) assert_equal(G.size(weight='weight'),6.3) assert_equal(G.size(weight='other'),7.2) def test_to_undirected_reciprocal(self): G=self.Graph() G.add_edge(1,2) assert_true(G.to_undirected().has_edge(1,2)) assert_false(G.to_undirected(reciprocal=True).has_edge(1,2)) G.add_edge(2,1) assert_true(G.to_undirected(reciprocal=True).has_edge(1,2)) def test_reverse_copy(self): G=networkx.MultiDiGraph([(0,1),(0,1)]) R=G.reverse() assert_equal(sorted(R.edges()),[(1,0),(1,0)]) R.remove_edge(1,0) assert_equal(sorted(R.edges()),[(1,0)]) assert_equal(sorted(G.edges()),[(0,1),(0,1)]) def test_reverse_nocopy(self): G=networkx.MultiDiGraph([(0,1),(0,1)]) R=G.reverse(copy=False) 
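        # With copy=False the graph is reversed in place and the same object is
        # returned, so removing an edge from R is also visible through G below.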
assert_equal(sorted(R.edges()),[(1,0),(1,0)]) R.remove_edge(1,0) assert_equal(sorted(R.edges()),[(1,0)]) assert_equal(sorted(G.edges()),[(1,0)]) class TestMultiDiGraph(BaseMultiDiGraphTester,TestMultiGraph): def setUp(self): self.Graph=networkx.MultiDiGraph # build K3 self.k3edges=[(0, 1), (0, 2), (1, 2)] self.k3nodes=[0, 1, 2] self.K3=self.Graph() self.K3.adj={0:{},1:{},2:{}} self.K3.succ=self.K3.adj self.K3.pred={0:{},1:{},2:{}} for u in self.k3nodes: for v in self.k3nodes: if u==v: continue d={0:{}} self.K3.succ[u][v]=d self.K3.pred[v][u]=d self.K3.adj=self.K3.succ self.K3.edge=self.K3.adj self.K3.node={} self.K3.node[0]={} self.K3.node[1]={} self.K3.node[2]={} def test_add_edge(self): G=self.Graph() G.add_edge(0,1) assert_equal(G.adj,{0: {1: {0:{}}}, 1: {}}) assert_equal(G.succ,{0: {1: {0:{}}}, 1: {}}) assert_equal(G.pred,{0: {}, 1: {0:{0:{}}}}) G=self.Graph() G.add_edge(*(0,1)) assert_equal(G.adj,{0: {1: {0:{}}}, 1: {}}) assert_equal(G.succ,{0: {1: {0:{}}}, 1: {}}) assert_equal(G.pred,{0: {}, 1: {0:{0:{}}}}) def test_add_edges_from(self): G=self.Graph() G.add_edges_from([(0,1),(0,1,{'weight':3})]) assert_equal(G.adj,{0: {1: {0:{},1:{'weight':3}}}, 1: {}}) assert_equal(G.succ,{0: {1: {0:{},1:{'weight':3}}}, 1: {}}) assert_equal(G.pred,{0: {}, 1: {0:{0:{},1:{'weight':3}}}}) G.add_edges_from([(0,1),(0,1,{'weight':3})],weight=2) assert_equal(G.succ,{0: {1: {0:{}, 1:{'weight':3}, 2:{'weight':2}, 3:{'weight':3}}}, 1: {}}) assert_equal(G.pred,{0: {}, 1: {0:{0:{},1:{'weight':3}, 2:{'weight':2}, 3:{'weight':3}}}}) assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,)]) # too few in tuple assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,1,2,3,4)]) # too many in tuple assert_raises(TypeError, G.add_edges_from,[0]) # not a tuple def test_remove_edge(self): G=self.K3 G.remove_edge(0,1) assert_equal(G.succ,{0:{2:{0:{}}}, 1:{0:{0:{}},2:{0:{}}}, 2:{0:{0:{}},1:{0:{}}}}) assert_equal(G.pred,{0:{1:{0:{}}, 2:{0:{}}}, 1:{2:{0:{}}}, 2:{0:{0:{}},1:{0:{}}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,0,2, key=1) def test_remove_multiedge(self): G=self.K3 G.add_edge(0,1,key='parallel edge') G.remove_edge(0,1,key='parallel edge') assert_equal(G.adj,{0: {1: {0:{}}, 2: {0:{}}}, 1: {0: {0:{}}, 2: {0:{}}}, 2: {0: {0:{}}, 1: {0:{}}}}) assert_equal(G.succ,{0: {1: {0:{}}, 2: {0:{}}}, 1: {0: {0:{}}, 2: {0:{}}}, 2: {0: {0:{}}, 1: {0:{}}}}) assert_equal(G.pred,{0:{1: {0:{}},2:{0:{}}}, 1:{0:{0:{}},2:{0:{}}}, 2:{0:{0:{}},1:{0:{}}}}) G.remove_edge(0,1) assert_equal(G.succ,{0:{2:{0:{}}}, 1:{0:{0:{}},2:{0:{}}}, 2:{0:{0:{}},1:{0:{}}}}) assert_equal(G.pred,{0:{1:{0:{}}, 2:{0:{}}}, 1:{2:{0:{}}}, 2:{0:{0:{}},1:{0:{}}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0) def test_remove_edges_from(self): G=self.K3 G.remove_edges_from([(0,1)]) assert_equal(G.succ,{0:{2:{0:{}}}, 1:{0:{0:{}},2:{0:{}}}, 2:{0:{0:{}},1:{0:{}}}}) assert_equal(G.pred,{0:{1:{0:{}}, 2:{0:{}}}, 1:{2:{0:{}}}, 2:{0:{0:{}},1:{0:{}}}}) G.remove_edges_from([(0,0)]) # silent fail networkx-1.8.1/networkx/classes/tests/test_digraph.py0000664000175000017500000002401612177456333023007 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx from test_graph import BaseGraphTester, BaseAttrGraphTester, TestGraph class BaseDiGraphTester(BaseGraphTester): def test_has_successor(self): G=self.K3 assert_equal(G.has_successor(0,1),True) assert_equal(G.has_successor(0,-1),False) def test_successors(self): G=self.K3 
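        # K3 is the complete directed triangle, so every node's successors are
        # the other two nodes.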
assert_equal(sorted(G.successors(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.successors,-1) def test_successors_iter(self): G=self.K3 assert_equal(sorted(G.successors_iter(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.successors_iter,-1) def test_has_predecessor(self): G=self.K3 assert_equal(G.has_predecessor(0,1),True) assert_equal(G.has_predecessor(0,-1),False) def test_predecessors(self): G=self.K3 assert_equal(sorted(G.predecessors(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.predecessors,-1) def test_predecessors_iter(self): G=self.K3 assert_equal(sorted(G.predecessors_iter(0)),[1,2]) assert_raises((KeyError,networkx.NetworkXError), G.predecessors_iter,-1) def test_edges(self): G=self.K3 assert_equal(sorted(G.edges()),[(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.edges(0)),[(0,1),(0,2)]) assert_raises((KeyError,networkx.NetworkXError), G.edges,-1) def test_edges_iter(self): G=self.K3 assert_equal(sorted(G.edges_iter()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.edges_iter(0)),[(0,1),(0,2)]) def test_edges_data(self): G=self.K3 assert_equal(sorted(G.edges(data=True)), [(0,1,{}),(0,2,{}),(1,0,{}),(1,2,{}),(2,0,{}),(2,1,{})]) assert_equal(sorted(G.edges(0,data=True)),[(0,1,{}),(0,2,{})]) assert_raises((KeyError,networkx.NetworkXError), G.edges,-1) def test_out_edges(self): G=self.K3 assert_equal(sorted(G.out_edges()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.out_edges(0)),[(0,1),(0,2)]) assert_raises((KeyError,networkx.NetworkXError), G.out_edges,-1) def test_out_edges_iter(self): G=self.K3 assert_equal(sorted(G.out_edges_iter()), [(0,1),(0,2),(1,0),(1,2),(2,0),(2,1)]) assert_equal(sorted(G.edges_iter(0)),[(0,1),(0,2)]) def test_out_edges_dir(self): G=self.P3 assert_equal(sorted(G.out_edges()),[(0, 1), (1, 2)]) assert_equal(sorted(G.out_edges(0)),[(0, 1)]) assert_equal(sorted(G.out_edges(2)),[]) def test_out_edges_iter_dir(self): G=self.P3 assert_equal(sorted(G.out_edges_iter()),[(0, 1), (1, 2)]) assert_equal(sorted(G.out_edges_iter(0)),[(0, 1)]) assert_equal(sorted(G.out_edges_iter(2)),[]) def test_in_edges_dir(self): G=self.P3 assert_equal(sorted(G.in_edges()),[(0, 1), (1, 2)]) assert_equal(sorted(G.in_edges(0)),[]) assert_equal(sorted(G.in_edges(2)),[(1,2)]) def test_in_edges_iter_dir(self): G=self.P3 assert_equal(sorted(G.in_edges_iter()),[(0, 1), (1, 2)]) assert_equal(sorted(G.in_edges_iter(0)),[]) assert_equal(sorted(G.in_edges_iter(2)),[(1,2)]) def test_degree(self): G=self.K3 assert_equal(list(G.degree().values()),[4,4,4]) assert_equal(G.degree(),{0:4,1:4,2:4}) assert_equal(G.degree(0),4) assert_equal(G.degree([0]),{0:4}) assert_raises((KeyError,networkx.NetworkXError), G.degree,-1) def test_degree_iter(self): G=self.K3 assert_equal(list(G.degree_iter()),[(0,4),(1,4),(2,4)]) assert_equal(dict(G.degree_iter()),{0:4,1:4,2:4}) assert_equal(list(G.degree_iter(0)),[(0,4)]) def test_in_degree(self): G=self.K3 assert_equal(list(G.in_degree().values()),[2,2,2]) assert_equal(G.in_degree(),{0:2,1:2,2:2}) assert_equal(G.in_degree(0),2) assert_equal(G.in_degree([0]),{0:2}) assert_raises((KeyError,networkx.NetworkXError), G.in_degree,-1) def test_in_degree_iter(self): G=self.K3 assert_equal(list(G.in_degree_iter()),[(0,2),(1,2),(2,2)]) assert_equal(dict(G.in_degree_iter()),{0:2,1:2,2:2}) assert_equal(list(G.in_degree_iter(0)),[(0,2)]) def test_in_degree_iter_weighted(self): G=self.K3 G.add_edge(0,1,weight=0.3,other=1.2) 
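        # Re-adding edge 0->1 attaches weight=0.3, so node 1's weighted
        # in-degree becomes 0.3 + 1 (from the unweighted edge 2->1) = 1.3,
        # while nodes 0 and 2 keep an in-degree of 2.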
assert_equal(list(G.in_degree_iter(weight='weight')),[(0,2),(1,1.3),(2,2)]) assert_equal(dict(G.in_degree_iter(weight='weight')),{0:2,1:1.3,2:2}) assert_equal(list(G.in_degree_iter(1,weight='weight')),[(1,1.3)]) assert_equal(list(G.in_degree_iter(weight='other')),[(0,2),(1,2.2),(2,2)]) assert_equal(dict(G.in_degree_iter(weight='other')),{0:2,1:2.2,2:2}) assert_equal(list(G.in_degree_iter(1,weight='other')),[(1,2.2)]) def test_out_degree(self): G=self.K3 assert_equal(list(G.out_degree().values()),[2,2,2]) assert_equal(G.out_degree(),{0:2,1:2,2:2}) assert_equal(G.out_degree(0),2) assert_equal(G.out_degree([0]),{0:2}) assert_raises((KeyError,networkx.NetworkXError), G.out_degree,-1) def test_out_degree_iter_weighted(self): G=self.K3 G.add_edge(0,1,weight=0.3,other=1.2) assert_equal(list(G.out_degree_iter(weight='weight')),[(0,1.3),(1,2),(2,2)]) assert_equal(dict(G.out_degree_iter(weight='weight')),{0:1.3,1:2,2:2}) assert_equal(list(G.out_degree_iter(0,weight='weight')),[(0,1.3)]) assert_equal(list(G.out_degree_iter(weight='other')),[(0,2.2),(1,2),(2,2)]) assert_equal(dict(G.out_degree_iter(weight='other')),{0:2.2,1:2,2:2}) assert_equal(list(G.out_degree_iter(0,weight='other')),[(0,2.2)]) def test_out_degree_iter(self): G=self.K3 assert_equal(list(G.out_degree_iter()),[(0,2),(1,2),(2,2)]) assert_equal(dict(G.out_degree_iter()),{0:2,1:2,2:2}) assert_equal(list(G.out_degree_iter(0)),[(0,2)]) def test_size(self): G=self.K3 assert_equal(G.size(),6) assert_equal(G.number_of_edges(),6) def test_to_undirected_reciprocal(self): G=self.Graph() G.add_edge(1,2) assert_true(G.to_undirected().has_edge(1,2)) assert_false(G.to_undirected(reciprocal=True).has_edge(1,2)) G.add_edge(2,1) assert_true(G.to_undirected(reciprocal=True).has_edge(1,2)) def test_reverse_copy(self): G=networkx.DiGraph([(0,1),(1,2)]) R=G.reverse() assert_equal(sorted(R.edges()),[(1,0),(2,1)]) R.remove_edge(1,0) assert_equal(sorted(R.edges()),[(2,1)]) assert_equal(sorted(G.edges()),[(0,1),(1,2)]) def test_reverse_nocopy(self): G=networkx.DiGraph([(0,1),(1,2)]) R=G.reverse(copy=False) assert_equal(sorted(R.edges()),[(1,0),(2,1)]) R.remove_edge(1,0) assert_equal(sorted(R.edges()),[(2,1)]) assert_equal(sorted(G.edges()),[(2,1)]) class BaseAttrDiGraphTester(BaseDiGraphTester,BaseAttrGraphTester): pass class TestDiGraph(BaseAttrDiGraphTester,TestGraph): """Tests specific to dict-of-dict-of-dict digraph data structure""" def setUp(self): self.Graph=networkx.DiGraph # build dict-of-dict-of-dict K3 ed1,ed2,ed3,ed4,ed5,ed6 = ({},{},{},{},{},{}) self.k3adj={0: {1: ed1, 2: ed2}, 1: {0: ed3, 2: ed4}, 2: {0: ed5, 1:ed6}} self.k3edges=[(0, 1), (0, 2), (1, 2)] self.k3nodes=[0, 1, 2] self.K3=self.Graph() self.K3.adj = self.K3.succ = self.K3.edge = self.k3adj self.K3.pred={0: {1: ed3, 2: ed5}, 1: {0: ed1, 2: ed6}, 2: {0: ed2, 1:ed4}} ed1,ed2 = ({},{}) self.P3=self.Graph() self.P3.adj={0: {1: ed1}, 1: {2: ed2}, 2: {}} self.P3.succ=self.P3.adj self.P3.pred={0: {}, 1: {0: ed1}, 2: {1: ed2}} self.K3.node={} self.K3.node[0]={} self.K3.node[1]={} self.K3.node[2]={} self.P3.node={} self.P3.node[0]={} self.P3.node[1]={} self.P3.node[2]={} def test_data_input(self): G=self.Graph(data={1:[2],2:[1]}, name="test") assert_equal(G.name,"test") assert_equal(sorted(G.adj.items()),[(1, {2: {}}), (2, {1: {}})]) assert_equal(sorted(G.succ.items()),[(1, {2: {}}), (2, {1: {}})]) assert_equal(sorted(G.pred.items()),[(1, {2: {}}), (2, {1: {}})]) def test_add_edge(self): G=self.Graph() G.add_edge(0,1) assert_equal(G.adj,{0: {1: {}}, 1: {}}) assert_equal(G.succ,{0: {1: {}}, 1: 
{}}) assert_equal(G.pred,{0: {}, 1: {0:{}}}) G=self.Graph() G.add_edge(*(0,1)) assert_equal(G.adj,{0: {1: {}}, 1: {}}) assert_equal(G.succ,{0: {1: {}}, 1: {}}) assert_equal(G.pred,{0: {}, 1: {0:{}}}) def test_add_edges_from(self): G=self.Graph() G.add_edges_from([(0,1),(0,2,{'data':3})],data=2) assert_equal(G.adj,{0: {1: {'data':2}, 2: {'data':3}}, 1: {}, 2: {}}) assert_equal(G.succ,{0: {1: {'data':2}, 2: {'data':3}}, 1: {}, 2: {}}) assert_equal(G.pred,{0: {}, 1: {0: {'data':2}}, 2: {0: {'data':3}}}) assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,)]) # too few in tuple assert_raises(networkx.NetworkXError, G.add_edges_from,[(0,1,2,3)]) # too many in tuple assert_raises(TypeError, G.add_edges_from,[0]) # not a tuple def test_remove_edge(self): G=self.K3 G.remove_edge(0,1) assert_equal(G.succ,{0:{2:{}},1:{0:{},2:{}},2:{0:{},1:{}}}) assert_equal(G.pred,{0:{1:{}, 2:{}}, 1:{2:{}}, 2:{0:{},1:{}}}) assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0) def test_remove_edges_from(self): G=self.K3 G.remove_edges_from([(0,1)]) assert_equal(G.succ,{0:{2:{}},1:{0:{},2:{}},2:{0:{},1:{}}}) assert_equal(G.pred,{0:{1:{}, 2:{}}, 1:{2:{}}, 2:{0:{},1: {}}}) G.remove_edges_from([(0,0)]) # silent fail networkx-1.8.1/networkx/classes/tests/test_function.py0000664000175000017500000002027212177456333023216 0ustar aricaric00000000000000#!/usr/bin/env python import random from nose.tools import * import networkx import networkx as nx class TestFunction(object): def setUp(self): self.G=networkx.Graph({0:[1,2,3], 1:[1,2,0], 4:[]}, name='Test') self.Gdegree={0:3, 1:2, 2:2, 3:1, 4:0} self.Gnodes=list(range(5)) self.Gedges=[(0,1),(0,2),(0,3),(1,0),(1,1),(1,2)] self.DG=networkx.DiGraph({0:[1,2,3], 1:[1,2,0], 4:[]}) self.DGin_degree={0:1, 1:2, 2:2, 3:1, 4:0} self.DGout_degree={0:3, 1:3, 2:0, 3:0, 4:0} self.DGnodes=list(range(5)) self.DGedges=[(0,1),(0,2),(0,3),(1,0),(1,1),(1,2)] def test_nodes(self): assert_equal(self.G.nodes(),networkx.nodes(self.G)) assert_equal(self.DG.nodes(),networkx.nodes(self.DG)) def test_edges(self): assert_equal(self.G.edges(),networkx.edges(self.G)) assert_equal(self.DG.edges(),networkx.edges(self.DG)) assert_equal(self.G.edges(nbunch=[0,1,3]),networkx.edges(self.G,nbunch=[0,1,3])) assert_equal(self.DG.edges(nbunch=[0,1,3]),networkx.edges(self.DG,nbunch=[0,1,3])) def test_nodes_iter(self): assert_equal(list(self.G.nodes_iter()),list(networkx.nodes_iter(self.G))) assert_equal(list(self.DG.nodes_iter()),list(networkx.nodes_iter(self.DG))) def test_edges_iter(self): assert_equal(list(self.G.edges_iter()),list(networkx.edges_iter(self.G))) assert_equal(list(self.DG.edges_iter()),list(networkx.edges_iter(self.DG))) assert_equal(list(self.G.edges_iter(nbunch=[0,1,3])),list(networkx.edges_iter(self.G,nbunch=[0,1,3]))) assert_equal(list(self.DG.edges_iter(nbunch=[0,1,3])),list(networkx.edges_iter(self.DG,nbunch=[0,1,3]))) def test_degree(self): assert_equal(self.G.degree(),networkx.degree(self.G)) assert_equal(self.DG.degree(),networkx.degree(self.DG)) assert_equal(self.G.degree(nbunch=[0,1]),networkx.degree(self.G,nbunch=[0,1])) assert_equal(self.DG.degree(nbunch=[0,1]),networkx.degree(self.DG,nbunch=[0,1])) assert_equal(self.G.degree(weight='weight'),networkx.degree(self.G,weight='weight')) assert_equal(self.DG.degree(weight='weight'),networkx.degree(self.DG,weight='weight')) def test_neighbors(self): assert_equal(self.G.neighbors(1),networkx.neighbors(self.G,1)) assert_equal(self.DG.neighbors(1),networkx.neighbors(self.DG,1)) def test_number_of_nodes(self): 
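        # Each module-level helper should agree with the corresponding bound
        # method on both the undirected and the directed fixture.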
assert_equal(self.G.number_of_nodes(),networkx.number_of_nodes(self.G)) assert_equal(self.DG.number_of_nodes(),networkx.number_of_nodes(self.DG)) def test_number_of_edges(self): assert_equal(self.G.number_of_edges(),networkx.number_of_edges(self.G)) assert_equal(self.DG.number_of_edges(),networkx.number_of_edges(self.DG)) def test_is_directed(self): assert_equal(self.G.is_directed(),networkx.is_directed(self.G)) assert_equal(self.DG.is_directed(),networkx.is_directed(self.DG)) def test_subgraph(self): assert_equal(self.G.subgraph([0,1,2,4]).adj,networkx.subgraph(self.G,[0,1,2,4]).adj) assert_equal(self.DG.subgraph([0,1,2,4]).adj,networkx.subgraph(self.DG,[0,1,2,4]).adj) def test_create_empty_copy(self): G=networkx.create_empty_copy(self.G, with_nodes=False) assert_equal(G.nodes(),[]) assert_equal(G.graph,{}) assert_equal(G.node,{}) assert_equal(G.edge,{}) G=networkx.create_empty_copy(self.G) assert_equal(G.nodes(),self.G.nodes()) assert_equal(G.graph,{}) assert_equal(G.node,{}.fromkeys(self.G.nodes(),{})) assert_equal(G.edge,{}.fromkeys(self.G.nodes(),{})) def test_degree_histogram(self): assert_equal(networkx.degree_histogram(self.G), [1,1,1,1,1]) def test_density(self): assert_equal(networkx.density(self.G), 0.5) assert_equal(networkx.density(self.DG), 0.3) G=networkx.Graph() G.add_node(1) assert_equal(networkx.density(G), 0.0) def test_density_selfloop(self): G = nx.Graph() G.add_edge(1,1) assert_equal(networkx.density(G), 0.0) G.add_edge(1,2) assert_equal(networkx.density(G), 2.0) def test_freeze(self): G=networkx.freeze(self.G) assert_equal(G.frozen,True) assert_raises(networkx.NetworkXError, G.add_node, 1) assert_raises(networkx.NetworkXError, G.add_nodes_from, [1]) assert_raises(networkx.NetworkXError, G.remove_node, 1) assert_raises(networkx.NetworkXError, G.remove_nodes_from, [1]) assert_raises(networkx.NetworkXError, G.add_edge, 1,2) assert_raises(networkx.NetworkXError, G.add_edges_from, [(1,2)]) assert_raises(networkx.NetworkXError, G.remove_edge, 1,2) assert_raises(networkx.NetworkXError, G.remove_edges_from, [(1,2)]) assert_raises(networkx.NetworkXError, G.clear) def test_is_frozen(self): assert_equal(networkx.is_frozen(self.G), False) G=networkx.freeze(self.G) assert_equal(G.frozen, networkx.is_frozen(self.G)) assert_equal(G.frozen,True) def test_info(self): G=networkx.path_graph(5) info=networkx.info(G) expected_graph_info='\n'.join(['Name: path_graph(5)', 'Type: Graph', 'Number of nodes: 5', 'Number of edges: 4', 'Average degree: 1.6000']) assert_equal(info,expected_graph_info) info=networkx.info(G,n=1) expected_node_info='\n'.join( ['Node 1 has the following properties:', 'Degree: 2', 'Neighbors: 0 2']) assert_equal(info,expected_node_info) def test_info_digraph(self): G=networkx.DiGraph(name='path_graph(5)') G.add_path([0,1,2,3,4]) info=networkx.info(G) expected_graph_info='\n'.join(['Name: path_graph(5)', 'Type: DiGraph', 'Number of nodes: 5', 'Number of edges: 4', 'Average in degree: 0.8000', 'Average out degree: 0.8000']) assert_equal(info,expected_graph_info) info=networkx.info(G,n=1) expected_node_info='\n'.join( ['Node 1 has the following properties:', 'Degree: 2', 'Neighbors: 2']) assert_equal(info,expected_node_info) assert_raises(networkx.NetworkXError,networkx.info,G,n=-1) def test_neighbors(self): graph = nx.complete_graph(100) pop = random.sample(graph.nodes(), 1) nbors = list(nx.neighbors(graph, pop[0])) # should be all the other vertices in the graph assert_equal(len(nbors), len(graph) - 1) graph = nx.path_graph(100) node = random.sample(graph.nodes(), 
1)[0] nbors = list(nx.neighbors(graph, node)) # should be all the other vertices in the graph if node != 0 and node != 99: assert_equal(len(nbors), 2) else: assert_equal(len(nbors), 1) # create a star graph with 99 outer nodes graph = nx.star_graph(99) nbors = list(nx.neighbors(graph, 0)) assert_equal(len(nbors), 99) def test_non_neighbors(self): graph = nx.complete_graph(100) pop = random.sample(graph.nodes(), 1) nbors = list(nx.non_neighbors(graph, pop[0])) # should be all the other vertices in the graph assert_equal(len(nbors), 0) graph = nx.path_graph(100) node = random.sample(graph.nodes(), 1)[0] nbors = list(nx.non_neighbors(graph, node)) # should be all the other vertices in the graph if node != 0 and node != 99: assert_equal(len(nbors), 97) else: assert_equal(len(nbors), 98) # create a star graph with 99 outer nodes graph = nx.star_graph(99) nbors = list(nx.non_neighbors(graph, 0)) assert_equal(len(nbors), 0) # disconnected graph graph = nx.Graph() graph.add_nodes_from(range(10)) nbors = list(nx.non_neighbors(graph, 0)) assert_equal(len(nbors), 9) networkx-1.8.1/networkx/classes/digraph.py0000664000175000017500000011515512177456333020613 0ustar aricaric00000000000000"""Base class for directed graphs.""" # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from copy import deepcopy import networkx as nx from networkx.classes.graph import Graph from networkx.exception import NetworkXError import networkx.convert as convert __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) class DiGraph(Graph): """ Base class for directed graphs. A DiGraph stores nodes and edges with optional data, or attributes. DiGraphs hold directed edges. Self loops are allowed but multiple (parallel) edges are not. Nodes can be arbitrary (hashable) Python objects with optional key/value attributes. Edges are represented as links between nodes with optional key/value attributes. Parameters ---------- data : input graph Data to initialize graph. If data=None (default) an empty graph is created. The data can be an edge list, or any NetworkX graph object. If the corresponding optional Python packages are installed the data can also be a NumPy matrix or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to graph as key=value pairs. See Also -------- Graph MultiGraph MultiDiGraph Examples -------- Create an empty graph structure (a "null graph") with no nodes and no edges. >>> G = nx.DiGraph() G can be grown in several ways. **Nodes:** Add one node at a time: >>> G.add_node(1) Add the nodes from any container (a list, dict, set or even the lines from a file or the nodes from another graph). >>> G.add_nodes_from([2,3]) >>> G.add_nodes_from(range(100,110)) >>> H=nx.Graph() >>> H.add_path([0,1,2,3,4,5,6,7,8,9]) >>> G.add_nodes_from(H) In addition to strings and integers any hashable Python object (except None) can represent a node, e.g. a customized node object, or even another Graph. >>> G.add_node(H) **Edges:** G can also be grown by adding edges. Add one edge, >>> G.add_edge(1, 2) a list of edges, >>> G.add_edges_from([(1,2),(1,3)]) or a collection of edges, >>> G.add_edges_from(H.edges()) If some edges connect nodes not yet in the graph, the nodes are added automatically. There are no errors when adding nodes or edges that already exist. 
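    For instance, re-adding an edge that is already present simply succeeds:

    >>> G.add_edge(1, 2)   # (1,2) was added above; silently accepted
    >>> G.has_edge(1, 2)
    True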
**Attributes:** Each graph, node, and edge can hold key/value attribute pairs in an associated attribute dictionary (the keys must be hashable). By default these are empty, but can be added or changed using add_edge, add_node or direct manipulation of the attribute dictionaries named graph, node and edge respectively. >>> G = nx.DiGraph(day="Friday") >>> G.graph {'day': 'Friday'} Add node attributes using add_node(), add_nodes_from() or G.node >>> G.add_node(1, time='5pm') >>> G.add_nodes_from([3], time='2pm') >>> G.node[1] {'time': '5pm'} >>> G.node[1]['room'] = 714 >>> del G.node[1]['room'] # remove attribute >>> G.nodes(data=True) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Warning: adding a node to G.node does not add it to the graph. Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edge. >>> G.add_edge(1, 2, weight=4.7 ) >>> G.add_edges_from([(3,4),(4,5)], color='red') >>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) >>> G[1][2]['weight'] = 4.7 >>> G.edge[1][2]['weight'] = 4 **Shortcuts:** Many common graph features allow python syntax to speed reporting. >>> 1 in G # check if node in graph True >>> [n for n in G if n<3] # iterate through nodes [1, 2] >>> len(G) # number of nodes in graph 5 The fastest way to traverse all edges of a graph is via adjacency_iter(), but the edges() method is often more convenient. >>> for n,nbrsdict in G.adjacency_iter(): ... for nbr,eattr in nbrsdict.items(): ... if 'weight' in eattr: ... (n,nbr,eattr['weight']) (1, 2, 4) (2, 3, 8) >>> [ (u,v,edata['weight']) for u,v,edata in G.edges(data=True) if 'weight' in edata ] [(1, 2, 4), (2, 3, 8)] **Reporting:** Simple graph information is obtained using methods. Iterator versions of many reporting methods exist for efficiency. Methods exist for reporting nodes(), edges(), neighbors() and degree() as well as the number of nodes and edges. For details on these and other miscellaneous methods, see below. """ def __init__(self, data=None, **attr): """Initialize a graph with edges, name, graph attributes. Parameters ---------- data : input graph Data to initialize graph. If data=None (default) an empty graph is created. The data can be an edge list, or any NetworkX graph object. If the corresponding optional Python packages are installed the data can also be a NumPy matrix or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph. name : string, optional (default='') An optional name for the graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to graph as key=value pairs. 
See Also -------- convert Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G = nx.Graph(name='my graph') >>> e = [(1,2),(2,3),(3,4)] # list of edges >>> G = nx.Graph(e) Arbitrary graph attribute pairs (key=value) may be assigned >>> G=nx.Graph(e, day="Friday") >>> G.graph {'day': 'Friday'} """ self.graph = {} # dictionary for graph attributes self.node = {} # dictionary for node attributes # We store two adjacency lists: # the predecessors of node n are stored in the dict self.pred # the successors of node n are stored in the dict self.succ=self.adj self.adj = {} # empty adjacency dictionary self.pred = {} # predecessor self.succ = self.adj # successor # attempt to load graph with data if data is not None: convert.to_networkx_graph(data,create_using=self) # load graph attributes (must be after convert) self.graph.update(attr) self.edge=self.adj def add_node(self, n, attr_dict=None, **attr): """Add a single node n and update node attributes. Parameters ---------- n : node A node can be any hashable Python object except None. attr_dict : dictionary, optional (default= no attributes) Dictionary of node attributes. Key/value pairs will update existing data associated with the node. attr : keyword arguments, optional Set or change attributes using key=value. See Also -------- add_nodes_from Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_node(1) >>> G.add_node('Hello') >>> K3 = nx.Graph([(0,1),(1,2),(2,0)]) >>> G.add_node(K3) >>> G.number_of_nodes() 3 Use keywords set/change node attributes: >>> G.add_node(1,size=10) >>> G.add_node(3,weight=0.4,UTM=('13S',382871,3972649)) Notes ----- A hashable object is one that can be used as a key in a Python dictionary. This includes strings, numbers, tuples of strings and numbers, etc. On many platforms hashable items also include mutables such as NetworkX Graphs, though one should be careful that the hash doesn't change on mutables. """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") if n not in self.succ: self.succ[n] = {} self.pred[n] = {} self.node[n] = attr_dict else: # update attr even if node already exists self.node[n].update(attr_dict) def add_nodes_from(self, nodes, **attr): """Add multiple nodes. Parameters ---------- nodes : iterable container A container of nodes (list, dict, set, etc.). OR A container of (node, attribute dict) tuples. Node attributes are updated using the attribute dict. attr : keyword arguments, optional (default= no attributes) Update attributes for all nodes in nodes. Node attributes specified in nodes as a tuple take precedence over attributes specified generally. See Also -------- add_node Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_nodes_from('Hello') >>> K3 = nx.Graph([(0,1),(1,2),(2,0)]) >>> G.add_nodes_from(K3) >>> sorted(G.nodes(),key=str) [0, 1, 2, 'H', 'e', 'l', 'o'] Use keywords to update specific node attributes for every node. >>> G.add_nodes_from([1,2], size=10) >>> G.add_nodes_from([3,4], weight=0.4) Use (node, attrdict) tuples to update attributes for specific nodes. 
>>> G.add_nodes_from([(1,dict(size=11)), (2,{'color':'blue'})]) >>> G.node[1]['size'] 11 >>> H = nx.Graph() >>> H.add_nodes_from(G.nodes(data=True)) >>> H.node[1]['size'] 11 """ for n in nodes: try: newnode=n not in self.succ except TypeError: nn,ndict = n if nn not in self.succ: self.succ[nn] = {} self.pred[nn] = {} newdict = attr.copy() newdict.update(ndict) self.node[nn] = newdict else: olddict = self.node[nn] olddict.update(attr) olddict.update(ndict) continue if newnode: self.succ[n] = {} self.pred[n] = {} self.node[n] = attr.copy() else: self.node[n].update(attr) def remove_node(self, n): """Remove node n. Removes the node n and all adjacent edges. Attempting to remove a non-existent node will raise an exception. Parameters ---------- n : node A node in the graph Raises ------- NetworkXError If n is not in the graph. See Also -------- remove_nodes_from Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> G.edges() [(0, 1), (1, 2)] >>> G.remove_node(1) >>> G.edges() [] """ try: nbrs=self.succ[n] del self.node[n] except KeyError: # NetworkXError if n not in self raise NetworkXError("The node %s is not in the digraph."%(n,)) for u in nbrs: del self.pred[u][n] # remove all edges n-u in digraph del self.succ[n] # remove node from succ for u in self.pred[n]: del self.succ[u][n] # remove all edges n-u in digraph del self.pred[n] # remove node from pred def remove_nodes_from(self, nbunch): """Remove multiple nodes. Parameters ---------- nodes : iterable container A container of nodes (list, dict, set, etc.). If a node in the container is not in the graph it is silently ignored. See Also -------- remove_node Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2]) >>> e = G.nodes() >>> e [0, 1, 2] >>> G.remove_nodes_from(e) >>> G.nodes() [] """ for n in nbunch: try: succs=self.succ[n] del self.node[n] for u in succs: del self.pred[u][n] # remove all edges n-u in digraph del self.succ[n] # now remove node for u in self.pred[n]: del self.succ[u][n] # remove all edges n-u in digraph del self.pred[n] # now remove node except KeyError: pass # silent failure on remove def add_edge(self, u, v, attr_dict=None, **attr): """Add an edge between u and v. The nodes u and v will be automatically added if they are not already in the graph. Edge attributes can be specified with keywords or by providing a dictionary with key/value pairs. See examples below. Parameters ---------- u,v : nodes Nodes can be, for example, strings or numbers. Nodes must be hashable (and not None) Python objects. attr_dict : dictionary, optional (default= no attributes) Dictionary of edge attributes. Key/value pairs will update existing data associated with the edge. attr : keyword arguments, optional Edge data (or labels or objects) can be assigned using keyword arguments. See Also -------- add_edges_from : add a collection of edges Notes ----- Adding an edge that already exists updates the edge data. Many NetworkX algorithms designed for weighted graphs use as the edge weight a numerical value assigned to a keyword which by default is 'weight'. 
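        A short sketch of that convention, using Dijkstra shortest paths as
        one such algorithm:

        >>> G = nx.DiGraph()
        >>> G.add_edge(1, 2, weight=3)
        >>> G.add_edge(2, 3, weight=7)
        >>> nx.dijkstra_path_length(G, 1, 3)
        10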
Examples -------- The following all add the edge e=(1,2) to graph G: >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> e = (1,2) >>> G.add_edge(1, 2) # explicit two-node form >>> G.add_edge(*e) # single edge as tuple of two nodes >>> G.add_edges_from( [(1,2)] ) # add edges from iterable container Associate data to edges using keywords: >>> G.add_edge(1, 2, weight=3) >>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7) """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") # add nodes if u not in self.succ: self.succ[u]={} self.pred[u]={} self.node[u] = {} if v not in self.succ: self.succ[v]={} self.pred[v]={} self.node[v] = {} # add the edge datadict=self.adj[u].get(v,{}) datadict.update(attr_dict) self.succ[u][v]=datadict self.pred[v][u]=datadict def add_edges_from(self, ebunch, attr_dict=None, **attr): """Add all the edges in ebunch. Parameters ---------- ebunch : container of edges Each edge given in the container will be added to the graph. The edges must be given as as 2-tuples (u,v) or 3-tuples (u,v,d) where d is a dictionary containing edge data. attr_dict : dictionary, optional (default= no attributes) Dictionary of edge attributes. Key/value pairs will update existing data associated with each edge. attr : keyword arguments, optional Edge data (or labels or objects) can be assigned using keyword arguments. See Also -------- add_edge : add a single edge add_weighted_edges_from : convenient way to add weighted edges Notes ----- Adding the same edge twice has no effect but any edge data will be updated when each duplicate edge is added. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_edges_from([(0,1),(1,2)]) # using a list of edge tuples >>> e = zip(range(0,3),range(1,4)) >>> G.add_edges_from(e) # Add the path graph 0-1-2-3 Associate data to edges >>> G.add_edges_from([(1,2),(2,3)], weight=3) >>> G.add_edges_from([(3,4),(1,4)], label='WN2898') """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dict.") # process ebunch for e in ebunch: ne = len(e) if ne==3: u,v,dd = e assert hasattr(dd,"update") elif ne==2: u,v = e dd = {} else: raise NetworkXError(\ "Edge tuple %s must be a 2-tuple or 3-tuple."%(e,)) if u not in self.succ: self.succ[u] = {} self.pred[u] = {} self.node[u] = {} if v not in self.succ: self.succ[v] = {} self.pred[v] = {} self.node[v] = {} datadict=self.adj[u].get(v,{}) datadict.update(attr_dict) datadict.update(dd) self.succ[u][v] = datadict self.pred[v][u] = datadict def remove_edge(self, u, v): """Remove the edge between u and v. Parameters ---------- u,v: nodes Remove the edge between nodes u and v. Raises ------ NetworkXError If there is not an edge between u and v. See Also -------- remove_edges_from : remove a collection of edges Examples -------- >>> G = nx.Graph() # or DiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.remove_edge(0,1) >>> e = (1,2) >>> G.remove_edge(*e) # unpacks e from an edge tuple >>> e = (2,3,{'weight':7}) # an edge with attribute data >>> G.remove_edge(*e[:2]) # select first part of edge tuple """ try: del self.succ[u][v] del self.pred[v][u] except KeyError: raise NetworkXError("The edge %s-%s not in graph."%(u,v)) def remove_edges_from(self, ebunch): """Remove all edges specified in ebunch. 
Parameters ---------- ebunch: list or container of edge tuples Each edge given in the list or container will be removed from the graph. The edges can be: - 2-tuples (u,v) edge between u and v. - 3-tuples (u,v,k) where k is ignored. See Also -------- remove_edge : remove a single edge Notes ----- Will fail silently if an edge in ebunch is not in the graph. Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> ebunch=[(1,2),(2,3)] >>> G.remove_edges_from(ebunch) """ for e in ebunch: (u,v)=e[:2] # ignore edge data if u in self.succ and v in self.succ[u]: del self.succ[u][v] del self.pred[v][u] def has_successor(self, u, v): """Return True if node u has successor v. This is true if graph has the edge u->v. """ return (u in self.succ and v in self.succ[u]) def has_predecessor(self, u, v): """Return True if node u has predecessor v. This is true if graph has the edge u<-v. """ return (u in self.pred and v in self.pred[u]) def successors_iter(self,n): """Return an iterator over successor nodes of n. neighbors_iter() and successors_iter() are the same. """ try: return iter(self.succ[n]) except KeyError: raise NetworkXError("The node %s is not in the digraph."%(n,)) def predecessors_iter(self,n): """Return an iterator over predecessor nodes of n.""" try: return iter(self.pred[n]) except KeyError: raise NetworkXError("The node %s is not in the digraph."%(n,)) def successors(self, n): """Return a list of successor nodes of n. neighbors() and successors() are the same function. """ return list(self.successors_iter(n)) def predecessors(self, n): """Return a list of predecessor nodes of n.""" return list(self.predecessors_iter(n)) # digraph definitions neighbors = successors neighbors_iter = successors_iter def edges_iter(self, nbunch=None, data=False): """Return an iterator over the edges. Edges are returned as tuples with optional data in the order (node, neighbor, data). Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict in 3-tuple (u,v,data). Returns ------- edge_iter : iterator An iterator of (u,v) or (u,v,d) tuples of edges. See Also -------- edges : return a list of edges Notes ----- Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs this returns the out-edges. Examples -------- >>> G = nx.DiGraph() # or MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> [e for e in G.edges_iter()] [(0, 1), (1, 2), (2, 3)] >>> list(G.edges_iter(data=True)) # default data is {} (empty dict) [(0, 1, {}), (1, 2, {}), (2, 3, {})] >>> list(G.edges_iter([0,2])) [(0, 1), (2, 3)] >>> list(G.edges_iter(0)) [(0, 1)] """ if nbunch is None: nodes_nbrs=self.adj.items() else: nodes_nbrs=((n,self.adj[n]) for n in self.nbunch_iter(nbunch)) if data: for n,nbrs in nodes_nbrs: for nbr,data in nbrs.items(): yield (n,nbr,data) else: for n,nbrs in nodes_nbrs: for nbr in nbrs: yield (n,nbr) # alias out_edges to edges out_edges_iter=edges_iter out_edges=Graph.edges def in_edges_iter(self, nbunch=None, data=False): """Return an iterator over the incoming edges. Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict in 3-tuple (u,v,data). 
Returns ------- in_edge_iter : iterator An iterator of (u,v) or (u,v,d) tuples of incoming edges. See Also -------- edges_iter : return an iterator of edges """ if nbunch is None: nodes_nbrs=self.pred.items() else: nodes_nbrs=((n,self.pred[n]) for n in self.nbunch_iter(nbunch)) if data: for n,nbrs in nodes_nbrs: for nbr,data in nbrs.items(): yield (nbr,n,data) else: for n,nbrs in nodes_nbrs: for nbr in nbrs: yield (nbr,n) def in_edges(self, nbunch=None, data=False): """Return a list of the incoming edges. See Also -------- edges : return a list of edges """ return list(self.in_edges_iter(nbunch, data)) def degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, degree). The node degree is the number of edges adjacent to the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, degree). See Also -------- degree, in_degree, out_degree, in_degree_iter, out_degree_iter Examples -------- >>> G = nx.DiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> list(G.degree_iter(0)) # node 0 with degree 1 [(0, 1)] >>> list(G.degree_iter([0,1])) [(0, 1), (1, 2)] """ if nbunch is None: nodes_nbrs=zip(iter(self.succ.items()),iter(self.pred.items())) else: nodes_nbrs=zip( ((n,self.succ[n]) for n in self.nbunch_iter(nbunch)), ((n,self.pred[n]) for n in self.nbunch_iter(nbunch))) if weight is None: for (n,succ),(n2,pred) in nodes_nbrs: yield (n,len(succ)+len(pred)) else: # edge weighted graph - degree is sum of edge weights for (n,succ),(n2,pred) in nodes_nbrs: yield (n, sum((succ[nbr].get(weight,1) for nbr in succ))+ sum((pred[nbr].get(weight,1) for nbr in pred))) def in_degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, in-degree). The node in-degree is the number of edges pointing in to the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, in-degree). See Also -------- degree, in_degree, out_degree, out_degree_iter Examples -------- >>> G = nx.DiGraph() >>> G.add_path([0,1,2,3]) >>> list(G.in_degree_iter(0)) # node 0 with degree 0 [(0, 0)] >>> list(G.in_degree_iter([0,1])) [(0, 0), (1, 1)] """ if nbunch is None: nodes_nbrs=self.pred.items() else: nodes_nbrs=((n,self.pred[n]) for n in self.nbunch_iter(nbunch)) if weight is None: for n,nbrs in nodes_nbrs: yield (n,len(nbrs)) else: # edge weighted graph - degree is sum of edge weights for n,nbrs in nodes_nbrs: yield (n, sum(data.get(weight,1) for data in nbrs.values())) def out_degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, out-degree). The node out-degree is the number of edges pointing out of the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. 
weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, out-degree). See Also -------- degree, in_degree, out_degree, in_degree_iter Examples -------- >>> G = nx.DiGraph() >>> G.add_path([0,1,2,3]) >>> list(G.out_degree_iter(0)) # node 0 with degree 1 [(0, 1)] >>> list(G.out_degree_iter([0,1])) [(0, 1), (1, 1)] """ if nbunch is None: nodes_nbrs=self.succ.items() else: nodes_nbrs=((n,self.succ[n]) for n in self.nbunch_iter(nbunch)) if weight is None: for n,nbrs in nodes_nbrs: yield (n,len(nbrs)) else: # edge weighted graph - degree is sum of edge weights for n,nbrs in nodes_nbrs: yield (n, sum(data.get(weight,1) for data in nbrs.values())) def in_degree(self, nbunch=None, weight=None): """Return the in-degree of a node or nodes. The node in-degree is the number of edges pointing in to the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd : dictionary, or number A dictionary with nodes as keys and in-degree as values or a number if a single node is specified. See Also -------- degree, out_degree, in_degree_iter Examples -------- >>> G = nx.DiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> G.in_degree(0) 0 >>> G.in_degree([0,1]) {0: 0, 1: 1} >>> list(G.in_degree([0,1]).values()) [0, 1] """ if nbunch in self: # return a single node return next(self.in_degree_iter(nbunch,weight))[1] else: # return a dict return dict(self.in_degree_iter(nbunch,weight)) def out_degree(self, nbunch=None, weight=None): """Return the out-degree of a node or nodes. The node out-degree is the number of edges pointing out of the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd : dictionary, or number A dictionary with nodes as keys and out-degree as values or a number if a single node is specified. Examples -------- >>> G = nx.DiGraph() # or MultiDiGraph >>> G.add_path([0,1,2,3]) >>> G.out_degree(0) 1 >>> G.out_degree([0,1]) {0: 1, 1: 1} >>> list(G.out_degree([0,1]).values()) [1, 1] """ if nbunch in self: # return a single node return next(self.out_degree_iter(nbunch,weight))[1] else: # return a dict return dict(self.out_degree_iter(nbunch,weight)) def clear(self): """Remove all nodes and edges from the graph. This also removes the name, and all graph, node, and edge attributes. 
Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> G.clear() >>> G.nodes() [] >>> G.edges() [] """ self.succ.clear() self.pred.clear() self.node.clear() self.graph.clear() def is_multigraph(self): """Return True if graph is a multigraph, False otherwise.""" return False def is_directed(self): """Return True if graph is directed, False otherwise.""" return True def to_directed(self): """Return a directed copy of the graph. Returns ------- G : DiGraph A deepcopy of the graph. Notes ----- This returns a "deepcopy" of the edge, node, and graph attributes which attempts to completely copy all of the data and references. This is in contrast to the similar D=DiGraph(G) which returns a shallow copy of the data. See the Python copy module for more information on shallow and deep copies, http://docs.python.org/library/copy.html. Examples -------- >>> G = nx.Graph() # or MultiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1), (1, 0)] If already directed, return a (deep) copy >>> G = nx.DiGraph() # or MultiDiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1)] """ return deepcopy(self) def to_undirected(self, reciprocal=False): """Return an undirected representation of the digraph. Parameters ---------- reciprocal : bool (optional) If True only keep edges that appear in both directions in the original digraph. Returns ------- G : Graph An undirected graph with the same name and nodes and with edge (u,v,data) if either (u,v,data) or (v,u,data) is in the digraph. If both edges exist in digraph and their edge data is different, only one edge is created with an arbitrary choice of which edge data to use. You must check and correct for this manually if desired. Notes ----- If edges in both directions (u,v) and (v,u) exist in the graph, attributes for the new undirected edge will be a combination of the attributes of the directed edges. The edge data is updated in the (arbitrary) order that the edges are encountered. For more customized control of the edge attributes use add_edge(). This returns a "deepcopy" of the edge, node, and graph attributes which attempts to completely copy all of the data and references. This is in contrast to the similar G=DiGraph(D) which returns a shallow copy of the data. See the Python copy module for more information on shallow and deep copies, http://docs.python.org/library/copy.html. """ H=Graph() H.name=self.name H.add_nodes_from(self) if reciprocal is True: H.add_edges_from( (u,v,deepcopy(d)) for u,nbrs in self.adjacency_iter() for v,d in nbrs.items() if v in self.pred[u]) else: H.add_edges_from( (u,v,deepcopy(d)) for u,nbrs in self.adjacency_iter() for v,d in nbrs.items() ) H.graph=deepcopy(self.graph) H.node=deepcopy(self.node) return H def reverse(self, copy=True): """Return the reverse of the graph. The reverse is a graph with the same nodes and edges but with the directions of the edges reversed. Parameters ---------- copy : bool optional (default=True) If True, return a new DiGraph holding the reversed edges. If False, reverse the reverse graph is created using the original graph (this changes the original graph). 
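Examples
--------
A minimal sketch (reversing a two-edge directed path):

        >>> G = nx.DiGraph()
        >>> G.add_path([0,1,2])
        >>> G.reverse().edges()
        [(1, 0), (2, 1)]
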
""" if copy: H = self.__class__(name="Reverse of (%s)"%self.name) H.add_nodes_from(self) H.add_edges_from( (v,u,deepcopy(d)) for u,v,d in self.edges(data=True) ) H.graph=deepcopy(self.graph) H.node=deepcopy(self.node) else: self.pred,self.succ=self.succ,self.pred self.adj=self.succ H=self return H def subgraph(self, nbunch): """Return the subgraph induced on nodes in nbunch. The induced subgraph of the graph contains the nodes in nbunch and the edges between those nodes. Parameters ---------- nbunch : list, iterable A container of nodes which will be iterated through once. Returns ------- G : Graph A subgraph of the graph with the same edge attributes. Notes ----- The graph, edge or node attributes just point to the original graph. So changes to the node or edge structure will not be reflected in the original graph while changes to the attributes will. To create a subgraph with its own copy of the edge/node attributes use: nx.Graph(G.subgraph(nbunch)) If edge attributes are containers, a deep copy can be obtained using: G.subgraph(nbunch).copy() For an inplace reduction of a graph to a subgraph you can remove nodes: G.remove_nodes_from([ n in G if n not in set(nbunch)]) Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> H = G.subgraph([0,1,2]) >>> H.edges() [(0, 1), (1, 2)] """ bunch = self.nbunch_iter(nbunch) # create new graph and copy subgraph into it H = self.__class__() # copy node and attribute dictionaries for n in bunch: H.node[n]=self.node[n] # namespace shortcuts for speed H_succ=H.succ H_pred=H.pred self_succ=self.succ # add nodes for n in H: H_succ[n]={} H_pred[n]={} # add edges for u in H_succ: Hnbrs=H_succ[u] for v,datadict in self_succ[u].items(): if v in H_succ: # add both representations of edge: u-v and v-u Hnbrs[v]=datadict H_pred[v][u]=datadict H.graph=self.graph return H networkx-1.8.1/networkx/classes/function.py0000664000175000017500000002441412177456333021017 0ustar aricaric00000000000000"""Functional interface to graph methods and assorted utilities. """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # import networkx as nx import itertools __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) __all__ = ['nodes', 'edges', 'degree', 'degree_histogram', 'neighbors', 'number_of_nodes', 'number_of_edges', 'density', 'nodes_iter', 'edges_iter', 'is_directed','info', 'freeze','is_frozen','subgraph','create_empty_copy', 'set_node_attributes','get_node_attributes', 'set_edge_attributes','get_edge_attributes', 'all_neighbors','non_neighbors'] def nodes(G): """Return a copy of the graph nodes in a list.""" return G.nodes() def nodes_iter(G): """Return an iterator over the graph nodes.""" return G.nodes_iter() def edges(G,nbunch=None): """Return list of edges adjacent to nodes in nbunch. Return all edges if nbunch is unspecified or nbunch=None. For digraphs, edges=out_edges """ return G.edges(nbunch) def edges_iter(G,nbunch=None): """Return iterator over edges adjacent to nodes in nbunch. Return all edges if nbunch is unspecified or nbunch=None. For digraphs, edges=out_edges """ return G.edges_iter(nbunch) def degree(G,nbunch=None,weight=None): """Return degree of single node or of nbunch of nodes. If nbunch is ommitted, then return degrees of *all* nodes. """ return G.degree(nbunch,weight) def neighbors(G,n): """Return a list of nodes connected to node n. 
""" return G.neighbors(n) def number_of_nodes(G): """Return the number of nodes in the graph.""" return G.number_of_nodes() def number_of_edges(G): """Return the number of edges in the graph. """ return G.number_of_edges() def density(G): r"""Return the density of a graph. The density for undirected graphs is .. math:: d = \frac{2m}{n(n-1)}, and for directed graphs is .. math:: d = \frac{m}{n(n-1)}, where `n` is the number of nodes and `m` is the number of edges in `G`. Notes ----- The density is 0 for a graph without edges and 1 for a complete graph. The density of multigraphs can be higher than 1. Self loops are counted in the total number of edges so graphs with self loops can have density higher than 1. """ n=number_of_nodes(G) m=number_of_edges(G) if m==0 or n <= 1: d=0.0 else: if G.is_directed(): d=m/float(n*(n-1)) else: d= m*2.0/float(n*(n-1)) return d def degree_histogram(G): """Return a list of the frequency of each degree value. Parameters ---------- G : Networkx graph A graph Returns ------- hist : list A list of frequencies of degrees. The degree values are the index in the list. Notes ----- Note: the bins are width one, hence len(list) can be large (Order(number_of_edges)) """ degseq=list(G.degree().values()) dmax=max(degseq)+1 freq= [ 0 for d in range(dmax) ] for d in degseq: freq[d] += 1 return freq def is_directed(G): """ Return True if graph is directed.""" return G.is_directed() def freeze(G): """Modify graph to prevent further change by adding or removing nodes or edges. Node and edge data can still be modified. Parameters ----------- G : graph A NetworkX graph Examples -------- >>> G=nx.Graph() >>> G.add_path([0,1,2,3]) >>> G=nx.freeze(G) >>> try: ... G.add_edge(4,5) ... except nx.NetworkXError as e: ... print(str(e)) Frozen graph can't be modified Notes ----- To "unfreeze" a graph you must make a copy by creating a new graph object: >>> graph = nx.path_graph(4) >>> frozen_graph = nx.freeze(graph) >>> unfrozen_graph = nx.Graph(frozen_graph) >>> nx.is_frozen(unfrozen_graph) False See Also -------- is_frozen """ def frozen(*args): raise nx.NetworkXError("Frozen graph can't be modified") G.add_node=frozen G.add_nodes_from=frozen G.remove_node=frozen G.remove_nodes_from=frozen G.add_edge=frozen G.add_edges_from=frozen G.remove_edge=frozen G.remove_edges_from=frozen G.clear=frozen G.frozen=True return G def is_frozen(G): """Return True if graph is frozen. Parameters ----------- G : graph A NetworkX graph See Also -------- freeze """ try: return G.frozen except AttributeError: return False def subgraph(G, nbunch): """Return the subgraph induced on nodes in nbunch. Parameters ---------- G : graph A NetworkX graph nbunch : list, iterable A container of nodes that will be iterated through once (thus it should be an iterator or be iterable). Each element of the container should be a valid node type: any hashable type except None. If nbunch is None, return all edges data in the graph. Nodes in nbunch that are not in the graph will be (quietly) ignored. Notes ----- subgraph(G) calls G.subgraph() """ return G.subgraph(nbunch) def create_empty_copy(G,with_nodes=True): """Return a copy of the graph G with all of the edges removed. Parameters ---------- G : graph A NetworkX graph with_nodes : bool (default=True) Include nodes. Notes ----- Graph, node, and edge data is not propagated to the new graph. """ H=G.__class__() if with_nodes: H.add_nodes_from(G) return H def info(G, n=None): """Print short summary of information for the graph G or the node n. 
Parameters ---------- G : Networkx graph A graph n : node (any hashable) A node in the graph G """ info='' # append this all to a string if n is None: info+="Name: %s\n"%G.name type_name = [type(G).__name__] info+="Type: %s\n"%",".join(type_name) info+="Number of nodes: %d\n"%G.number_of_nodes() info+="Number of edges: %d\n"%G.number_of_edges() nnodes=G.number_of_nodes() if len(G) > 0: if G.is_directed(): info+="Average in degree: %8.4f\n"%\ (sum(G.in_degree().values())/float(nnodes)) info+="Average out degree: %8.4f"%\ (sum(G.out_degree().values())/float(nnodes)) else: s=sum(G.degree().values()) info+="Average degree: %8.4f"%\ (float(s)/float(nnodes)) else: if n not in G: raise nx.NetworkXError("node %s not in graph"%(n,)) info+="Node % s has the following properties:\n"%n info+="Degree: %d\n"%G.degree(n) info+="Neighbors: " info+=' '.join(str(nbr) for nbr in G.neighbors(n)) return info def set_node_attributes(G,name,attributes): """Set node attributes from dictionary of nodes and values Parameters ---------- G : NetworkX Graph name : string Attribute name attributes: dict Dictionary of attributes keyed by node. Examples -------- >>> G=nx.path_graph(3) >>> bb=nx.betweenness_centrality(G) >>> nx.set_node_attributes(G,'betweenness',bb) >>> G.node[1]['betweenness'] 1.0 """ for node,value in attributes.items(): G.node[node][name]=value def get_node_attributes(G,name): """Get node attributes from graph Parameters ---------- G : NetworkX Graph name : string Attribute name Returns ------- Dictionary of attributes keyed by node. Examples -------- >>> G=nx.Graph() >>> G.add_nodes_from([1,2,3],color='red') >>> color=nx.get_node_attributes(G,'color') >>> color[1] 'red' """ return dict( (n,d[name]) for n,d in G.node.items() if name in d) def set_edge_attributes(G,name,attributes): """Set edge attributes from dictionary of edge tuples and values Parameters ---------- G : NetworkX Graph name : string Attribute name attributes: dict Dictionary of attributes keyed by edge (tuple). Examples -------- >>> G=nx.path_graph(3) >>> bb=nx.edge_betweenness_centrality(G, normalized=False) >>> nx.set_edge_attributes(G,'betweenness',bb) >>> G[1][2]['betweenness'] 2.0 """ for (u,v),value in attributes.items(): G[u][v][name]=value def get_edge_attributes(G,name): """Get edge attributes from graph Parameters ---------- G : NetworkX Graph name : string Attribute name Returns ------- Dictionary of attributes keyed by node. Examples -------- >>> G=nx.Graph() >>> G.add_path([1,2,3],color='red') >>> color=nx.get_edge_attributes(G,'color') >>> color[(1,2)] 'red' """ return dict( ((u,v),d[name]) for u,v,d in G.edges(data=True) if name in d) def all_neighbors(graph, node): """ Returns all of the neighbors of a node in the graph. If the graph is directed returns predecessors as well as successors. Parameters ---------- graph : NetworkX graph Graph to find neighbors. node : node The node whose neighbors will be returned. Returns ------- neighbors : iterator Iterator of neighbors """ if graph.is_directed(): values = itertools.chain.from_iterable([graph.predecessors_iter(node), graph.successors_iter(node)]) else: values = graph.neighbors_iter(node) return values def non_neighbors(graph, node): """Returns the non-neighbors of the node in the graph. Parameters ---------- graph : NetworkX graph Graph to find neighbors. node : node The node whose neighbors will be returned. Returns ------- non_neighbors : iterator Iterator of nodes in the graph that are not neighbors of the node. 
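Examples
--------
A minimal sketch (a 4-node path graph; node 0 is adjacent only to node 1):

    >>> G = nx.path_graph(4)
    >>> sorted(nx.non_neighbors(G, 0))
    [2, 3]
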
""" nbors = set(neighbors(graph, node)) | set([node]) return (nnode for nnode in graph if nnode not in nbors) networkx-1.8.1/networkx/classes/multidigraph.py0000664000175000017500000006766612177456333021703 0ustar aricaric00000000000000"""Base class for MultiDiGraph.""" # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from copy import deepcopy import networkx as nx from networkx.classes.graph import Graph # for doctests from networkx.classes.digraph import DiGraph from networkx.classes.multigraph import MultiGraph from networkx.exception import NetworkXError __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) class MultiDiGraph(MultiGraph,DiGraph): """A directed graph class that can store multiedges. Multiedges are multiple edges between two nodes. Each edge can hold optional data or attributes. A MultiDiGraph holds directed edges. Self loops are allowed. Nodes can be arbitrary (hashable) Python objects with optional key/value attributes. Edges are represented as links between nodes with optional key/value attributes. Parameters ---------- data : input graph Data to initialize graph. If data=None (default) an empty graph is created. The data can be an edge list, or any NetworkX graph object. If the corresponding optional Python packages are installed the data can also be a NumPy matrix or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph. attr : keyword arguments, optional (default= no attributes) Attributes to add to graph as key=value pairs. See Also -------- Graph DiGraph MultiGraph Examples -------- Create an empty graph structure (a "null graph") with no nodes and no edges. >>> G = nx.MultiDiGraph() G can be grown in several ways. **Nodes:** Add one node at a time: >>> G.add_node(1) Add the nodes from any container (a list, dict, set or even the lines from a file or the nodes from another graph). >>> G.add_nodes_from([2,3]) >>> G.add_nodes_from(range(100,110)) >>> H=nx.Graph() >>> H.add_path([0,1,2,3,4,5,6,7,8,9]) >>> G.add_nodes_from(H) In addition to strings and integers any hashable Python object (except None) can represent a node, e.g. a customized node object, or even another Graph. >>> G.add_node(H) **Edges:** G can also be grown by adding edges. Add one edge, >>> G.add_edge(1, 2) a list of edges, >>> G.add_edges_from([(1,2),(1,3)]) or a collection of edges, >>> G.add_edges_from(H.edges()) If some edges connect nodes not yet in the graph, the nodes are added automatically. If an edge already exists, an additional edge is created and stored using a key to identify the edge. By default the key is the lowest unused integer. >>> G.add_edges_from([(4,5,dict(route=282)), (4,5,dict(route=37))]) >>> G[4] {5: {0: {}, 1: {'route': 282}, 2: {'route': 37}}} **Attributes:** Each graph, node, and edge can hold key/value attribute pairs in an associated attribute dictionary (the keys must be hashable). By default these are empty, but can be added or changed using add_edge, add_node or direct manipulation of the attribute dictionaries named graph, node and edge respectively. 
>>> G = nx.MultiDiGraph(day="Friday") >>> G.graph {'day': 'Friday'} Add node attributes using add_node(), add_nodes_from() or G.node >>> G.add_node(1, time='5pm') >>> G.add_nodes_from([3], time='2pm') >>> G.node[1] {'time': '5pm'} >>> G.node[1]['room'] = 714 >>> del G.node[1]['room'] # remove attribute >>> G.nodes(data=True) [(1, {'time': '5pm'}), (3, {'time': '2pm'})] Warning: adding a node to G.node does not add it to the graph. Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edge. >>> G.add_edge(1, 2, weight=4.7 ) >>> G.add_edges_from([(3,4),(4,5)], color='red') >>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) >>> G[1][2][0]['weight'] = 4.7 >>> G.edge[1][2][0]['weight'] = 4 **Shortcuts:** Many common graph features allow python syntax to speed reporting. >>> 1 in G # check if node in graph True >>> [n for n in G if n<3] # iterate through nodes [1, 2] >>> len(G) # number of nodes in graph 5 >>> G[1] # adjacency dict keyed by neighbor to edge attributes ... # Note: you should not change this dict manually! {2: {0: {'weight': 4}, 1: {'color': 'blue'}}} The fastest way to traverse all edges of a graph is via adjacency_iter(), but the edges() method is often more convenient. >>> for n,nbrsdict in G.adjacency_iter(): ... for nbr,keydict in nbrsdict.items(): ... for key,eattr in keydict.items(): ... if 'weight' in eattr: ... (n,nbr,eattr['weight']) (1, 2, 4) (2, 3, 8) >>> [ (u,v,edata['weight']) for u,v,edata in G.edges(data=True) if 'weight' in edata ] [(1, 2, 4), (2, 3, 8)] **Reporting:** Simple graph information is obtained using methods. Iterator versions of many reporting methods exist for efficiency. Methods exist for reporting nodes(), edges(), neighbors() and degree() as well as the number of nodes and edges. For details on these and other miscellaneous methods, see below. """ def add_edge(self, u, v, key=None, attr_dict=None, **attr): """Add an edge between u and v. The nodes u and v will be automatically added if they are not already in the graph. Edge attributes can be specified with keywords or by providing a dictionary with key/value pairs. See examples below. Parameters ---------- u,v : nodes Nodes can be, for example, strings or numbers. Nodes must be hashable (and not None) Python objects. key : hashable identifier, optional (default=lowest unused integer) Used to distinguish multiedges between a pair of nodes. attr_dict : dictionary, optional (default= no attributes) Dictionary of edge attributes. Key/value pairs will update existing data associated with the edge. attr : keyword arguments, optional Edge data (or labels or objects) can be assigned using keyword arguments. See Also -------- add_edges_from : add a collection of edges Notes ----- To replace/update edge data, use the optional key argument to identify a unique edge. Otherwise a new edge will be created. NetworkX algorithms designed for weighted graphs cannot use multigraphs directly because it is not clear how to handle multiedge weights. Convert to Graph using edge attribute 'weight' to enable weighted graph algorithms. 
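One way such a conversion might look (a sketch only; summing the weights of
parallel edges is one possible choice, not the only one):

        >>> G = nx.MultiDiGraph()
        >>> G.add_edge(1, 2, weight=3)
        >>> G.add_edge(1, 2, weight=5)
        >>> D = nx.DiGraph()
        >>> D.add_edges_from((u, v, {'weight': sum(d.get('weight', 1) for d in keyd.values())})
        ...                  for u, nbrs in G.adjacency_iter() for v, keyd in nbrs.items())
        >>> D[1][2]['weight']
        8
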
Examples -------- The following all add the edge e=(1,2) to graph G: >>> G = nx.MultiDiGraph() >>> e = (1,2) >>> G.add_edge(1, 2) # explicit two-node form >>> G.add_edge(*e) # single edge as tuple of two nodes >>> G.add_edges_from( [(1,2)] ) # add edges from iterable container Associate data to edges using keywords: >>> G.add_edge(1, 2, weight=3) >>> G.add_edge(1, 2, key=0, weight=4) # update data for key=0 >>> G.add_edge(1, 3, weight=7, capacity=15, length=342.7) """ # set up attribute dict if attr_dict is None: attr_dict=attr else: try: attr_dict.update(attr) except AttributeError: raise NetworkXError(\ "The attr_dict argument must be a dictionary.") # add nodes if u not in self.succ: self.succ[u] = {} self.pred[u] = {} self.node[u] = {} if v not in self.succ: self.succ[v] = {} self.pred[v] = {} self.node[v] = {} if v in self.succ[u]: keydict=self.adj[u][v] if key is None: # find a unique integer key # other methods might be better here? key=len(keydict) while key in keydict: key+=1 datadict=keydict.get(key,{}) datadict.update(attr_dict) keydict[key]=datadict else: # selfloops work this way without special treatment if key is None: key=0 datadict={} datadict.update(attr_dict) keydict={key:datadict} self.succ[u][v] = keydict self.pred[v][u] = keydict def remove_edge(self, u, v, key=None): """Remove an edge between u and v. Parameters ---------- u,v: nodes Remove an edge between nodes u and v. key : hashable identifier, optional (default=None) Used to distinguish multiple edges between a pair of nodes. If None remove a single (abritrary) edge between u and v. Raises ------ NetworkXError If there is not an edge between u and v, or if there is no edge with the specified key. See Also -------- remove_edges_from : remove a collection of edges Examples -------- >>> G = nx.MultiDiGraph() >>> G.add_path([0,1,2,3]) >>> G.remove_edge(0,1) >>> e = (1,2) >>> G.remove_edge(*e) # unpacks e from an edge tuple For multiple edges >>> G = nx.MultiDiGraph() >>> G.add_edges_from([(1,2),(1,2),(1,2)]) >>> G.remove_edge(1,2) # remove a single (arbitrary) edge For edges with keys >>> G = nx.MultiDiGraph() >>> G.add_edge(1,2,key='first') >>> G.add_edge(1,2,key='second') >>> G.remove_edge(1,2,key='second') """ try: d=self.adj[u][v] except (KeyError): raise NetworkXError( "The edge %s-%s is not in the graph."%(u,v)) # remove the edge with specified data if key is None: d.popitem() else: try: del d[key] except (KeyError): raise NetworkXError( "The edge %s-%s with key %s is not in the graph."%(u,v,key)) if len(d)==0: # remove the key entries if last edge del self.succ[u][v] del self.pred[v][u] def edges_iter(self, nbunch=None, data=False, keys=False): """Return an iterator over the edges. Edges are returned as tuples with optional data and keys in the order (node, neighbor, key, data). Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict with each edge. keys : bool, optional (default=False) If True, return edge keys with each edge. Returns ------- edge_iter : iterator An iterator of (u,v), (u,v,d) or (u,v,key,d) tuples of edges. See Also -------- edges : return a list of edges Notes ----- Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs this returns the out-edges. 
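An illustrative use of the keys argument (a two-edge path; every edge gets
the default key 0):

        >>> G = nx.MultiDiGraph()
        >>> G.add_path([0,1,2])
        >>> list(G.edges_iter(keys=True))
        [(0, 1, 0), (1, 2, 0)]
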
Examples -------- >>> G = nx.MultiDiGraph() >>> G.add_path([0,1,2,3]) >>> [e for e in G.edges_iter()] [(0, 1), (1, 2), (2, 3)] >>> list(G.edges_iter(data=True)) # default data is {} (empty dict) [(0, 1, {}), (1, 2, {}), (2, 3, {})] >>> list(G.edges_iter([0,2])) [(0, 1), (2, 3)] >>> list(G.edges_iter(0)) [(0, 1)] """ if nbunch is None: nodes_nbrs = self.adj.items() else: nodes_nbrs=((n,self.adj[n]) for n in self.nbunch_iter(nbunch)) if data: for n,nbrs in nodes_nbrs: for nbr,keydict in nbrs.items(): for key,data in keydict.items(): if keys: yield (n,nbr,key,data) else: yield (n,nbr,data) else: for n,nbrs in nodes_nbrs: for nbr,keydict in nbrs.items(): for key,data in keydict.items(): if keys: yield (n,nbr,key) else: yield (n,nbr) # alias out_edges to edges out_edges_iter=edges_iter def out_edges(self, nbunch=None, keys=False, data=False): """Return a list of the outgoing edges. Edges are returned as tuples with optional data and keys in the order (node, neighbor, key, data). Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict with each edge. keys : bool, optional (default=False) If True, return edge keys with each edge. Returns ------- out_edges : list An listr of (u,v), (u,v,d) or (u,v,key,d) tuples of edges. Notes ----- Nodes in nbunch that are not in the graph will be (quietly) ignored. For directed graphs edges() is the same as out_edges(). See Also -------- in_edges: return a list of incoming edges """ return list(self.out_edges_iter(nbunch, keys=keys, data=data)) def in_edges_iter(self, nbunch=None, data=False, keys=False): """Return an iterator over the incoming edges. Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict with each edge. keys : bool, optional (default=False) If True, return edge keys with each edge. Returns ------- in_edge_iter : iterator An iterator of (u,v), (u,v,d) or (u,v,key,d) tuples of edges. See Also -------- edges_iter : return an iterator of edges """ if nbunch is None: nodes_nbrs=self.pred.items() else: nodes_nbrs=((n,self.pred[n]) for n in self.nbunch_iter(nbunch)) if data: for n,nbrs in nodes_nbrs: for nbr,keydict in nbrs.items(): for key,data in keydict.items(): if keys: yield (nbr,n,key,data) else: yield (nbr,n,data) else: for n,nbrs in nodes_nbrs: for nbr,keydict in nbrs.items(): for key,data in keydict.items(): if keys: yield (nbr,n,key) else: yield (nbr,n) def in_edges(self, nbunch=None, keys=False, data=False): """Return a list of the incoming edges. Parameters ---------- nbunch : iterable container, optional (default= all nodes) A container of nodes. The container will be iterated through once. data : bool, optional (default=False) If True, return edge attribute dict with each edge. keys : bool, optional (default=False) If True, return edge keys with each edge. Returns ------- in_edges : list A list of (u,v), (u,v,d) or (u,v,key,d) tuples of edges. See Also -------- out_edges: return a list of outgoing edges """ return list(self.in_edges_iter(nbunch, keys=keys, data=data)) def degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, degree). The node degree is the number of edges adjacent to the node. 
Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, degree). See Also -------- degree Examples -------- >>> G = nx.MultiDiGraph() >>> G.add_path([0,1,2,3]) >>> list(G.degree_iter(0)) # node 0 with degree 1 [(0, 1)] >>> list(G.degree_iter([0,1])) [(0, 1), (1, 2)] """ if nbunch is None: nodes_nbrs=zip(iter(self.succ.items()),iter(self.pred.items())) else: nodes_nbrs=zip( ((n,self.succ[n]) for n in self.nbunch_iter(nbunch)), ((n,self.pred[n]) for n in self.nbunch_iter(nbunch))) if weight is None: for (n,succ),(n2,pred) in nodes_nbrs: indeg = sum([len(data) for data in pred.values()]) outdeg = sum([len(data) for data in succ.values()]) yield (n, indeg + outdeg) else: # edge weighted graph - degree is sum of nbr edge weights for (n,succ),(n2,pred) in nodes_nbrs: deg = sum([d.get(weight,1) for data in pred.values() for d in data.values()]) deg += sum([d.get(weight,1) for data in succ.values() for d in data.values()]) yield (n, deg) def in_degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, in-degree). The node in-degree is the number of edges pointing in to the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, in-degree). See Also -------- degree, in_degree, out_degree, out_degree_iter Examples -------- >>> G = nx.MultiDiGraph() >>> G.add_path([0,1,2,3]) >>> list(G.in_degree_iter(0)) # node 0 with degree 0 [(0, 0)] >>> list(G.in_degree_iter([0,1])) [(0, 0), (1, 1)] """ if nbunch is None: nodes_nbrs=self.pred.items() else: nodes_nbrs=((n,self.pred[n]) for n in self.nbunch_iter(nbunch)) if weight is None: for n,nbrs in nodes_nbrs: yield (n, sum([len(data) for data in nbrs.values()]) ) else: # edge weighted graph - degree is sum of nbr edge weights for n,pred in nodes_nbrs: deg = sum([d.get(weight,1) for data in pred.values() for d in data.values()]) yield (n, deg) def out_degree_iter(self, nbunch=None, weight=None): """Return an iterator for (node, out-degree). The node out-degree is the number of edges pointing out of the node. Parameters ---------- nbunch : iterable container, optional (default=all nodes) A container of nodes. The container will be iterated through once. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights. Returns ------- nd_iter : an iterator The iterator returns two-tuples of (node, out-degree). 
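A weighted variant (the attribute name 'weight' here is only an example):

        >>> G = nx.MultiDiGraph()
        >>> G.add_edge(0, 1, weight=2)
        >>> G.add_edge(0, 1, weight=3)
        >>> list(G.out_degree_iter(0, weight='weight'))
        [(0, 5)]
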
See Also -------- degree, in_degree, out_degree, in_degree_iter Examples -------- >>> G = nx.MultiDiGraph() >>> G.add_path([0,1,2,3]) >>> list(G.out_degree_iter(0)) # node 0 with degree 1 [(0, 1)] >>> list(G.out_degree_iter([0,1])) [(0, 1), (1, 1)] """ if nbunch is None: nodes_nbrs=self.succ.items() else: nodes_nbrs=((n,self.succ[n]) for n in self.nbunch_iter(nbunch)) if weight is None: for n,nbrs in nodes_nbrs: yield (n, sum([len(data) for data in nbrs.values()]) ) else: for n,succ in nodes_nbrs: deg = sum([d.get(weight,1) for data in succ.values() for d in data.values()]) yield (n, deg) def is_multigraph(self): """Return True if graph is a multigraph, False otherwise.""" return True def is_directed(self): """Return True if graph is directed, False otherwise.""" return True def to_directed(self): """Return a directed copy of the graph. Returns ------- G : MultiDiGraph A deepcopy of the graph. Notes ----- If edges in both directions (u,v) and (v,u) exist in the graph, attributes for the new undirected edge will be a combination of the attributes of the directed edges. The edge data is updated in the (arbitrary) order that the edges are encountered. For more customized control of the edge attributes use add_edge(). This returns a "deepcopy" of the edge, node, and graph attributes which attempts to completely copy all of the data and references. This is in contrast to the similar G=DiGraph(D) which returns a shallow copy of the data. See the Python copy module for more information on shallow and deep copies, http://docs.python.org/library/copy.html. Examples -------- >>> G = nx.Graph() # or MultiGraph, etc >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1), (1, 0)] If already directed, return a (deep) copy >>> G = nx.MultiDiGraph() >>> G.add_path([0,1]) >>> H = G.to_directed() >>> H.edges() [(0, 1)] """ return deepcopy(self) def to_undirected(self, reciprocal=False): """Return an undirected representation of the digraph. Parameters ---------- reciprocal : bool (optional) If True only keep edges that appear in both directions in the original digraph. Returns ------- G : MultiGraph An undirected graph with the same name and nodes and with edge (u,v,data) if either (u,v,data) or (v,u,data) is in the digraph. If both edges exist in digraph and their edge data is different, only one edge is created with an arbitrary choice of which edge data to use. You must check and correct for this manually if desired. Notes ----- This returns a "deepcopy" of the edge, node, and graph attributes which attempts to completely copy all of the data and references. This is in contrast to the similar D=DiGraph(G) which returns a shallow copy of the data. See the Python copy module for more information on shallow and deep copies, http://docs.python.org/library/copy.html. """ H=MultiGraph() H.name=self.name H.add_nodes_from(self) if reciprocal is True: H.add_edges_from( (u,v,key,deepcopy(data)) for u,nbrs in self.adjacency_iter() for v,keydict in nbrs.items() for key,data in keydict.items() if self.has_edge(v,u,key)) else: H.add_edges_from( (u,v,key,deepcopy(data)) for u,nbrs in self.adjacency_iter() for v,keydict in nbrs.items() for key,data in keydict.items()) H.graph=deepcopy(self.graph) H.node=deepcopy(self.node) return H def subgraph(self, nbunch): """Return the subgraph induced on nodes in nbunch. The induced subgraph of the graph contains the nodes in nbunch and the edges between those nodes. Parameters ---------- nbunch : list, iterable A container of nodes which will be iterated through once. 
Returns ------- G : Graph A subgraph of the graph with the same edge attributes. Notes ----- The graph, edge or node attributes just point to the original graph. So changes to the node or edge structure will not be reflected in the original graph while changes to the attributes will. To create a subgraph with its own copy of the edge/node attributes use: nx.Graph(G.subgraph(nbunch)) If edge attributes are containers, a deep copy can be obtained using: G.subgraph(nbunch).copy() For an inplace reduction of a graph to a subgraph you can remove nodes: G.remove_nodes_from([ n in G if n not in set(nbunch)]) Examples -------- >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc >>> G.add_path([0,1,2,3]) >>> H = G.subgraph([0,1,2]) >>> H.edges() [(0, 1), (1, 2)] """ bunch = self.nbunch_iter(nbunch) # create new graph and copy subgraph into it H = self.__class__() # copy node and attribute dictionaries for n in bunch: H.node[n]=self.node[n] # namespace shortcuts for speed H_succ=H.succ H_pred=H.pred self_succ=self.succ self_pred=self.pred # add nodes for n in H: H_succ[n]={} H_pred[n]={} # add edges for u in H_succ: Hnbrs=H_succ[u] for v,edgedict in self_succ[u].items(): if v in H_succ: # add both representations of edge: u-v and v-u # they share the same edgedict ed=edgedict.copy() Hnbrs[v]=ed H_pred[v][u]=ed H.graph=self.graph return H def reverse(self, copy=True): """Return the reverse of the graph. The reverse is a graph with the same nodes and edges but with the directions of the edges reversed. Parameters ---------- copy : bool optional (default=True) If True, return a new DiGraph holding the reversed edges. If False, reverse the reverse graph is created using the original graph (this changes the original graph). """ if copy: H = self.__class__(name="Reverse of (%s)"%self.name) H.add_nodes_from(self) H.add_edges_from( (v,u,k,deepcopy(d)) for u,v,k,d in self.edges(keys=True, data=True) ) H.graph=deepcopy(self.graph) H.node=deepcopy(self.node) else: self.pred,self.succ=self.succ,self.pred self.adj=self.succ H=self return H networkx-1.8.1/networkx/algorithms/0000775000175000017500000000000012177457361017331 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/boundary.py0000664000175000017500000000505412177456333021530 0ustar aricaric00000000000000""" Routines to find the boundary of a set of nodes. Edge boundaries are edges that have only one end in the set of nodes. Node boundaries are nodes outside the set of nodes that have an edge to a node in the set. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult (dschult@colgate.edu)""" # Copyright (C) 2004-2008 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__=['edge_boundary','node_boundary'] def edge_boundary(G, nbunch1, nbunch2=None): """Return the edge boundary. Edge boundaries are edges that have only one end in the given set of nodes. Parameters ----------- G : graph A networkx graph nbunch1 : list, container Interior node set nbunch2 : list, container Exterior node set. If None then it is set to all of the nodes in G not in nbunch1. Returns ------- elist : list List of edges Notes ------ Nodes in nbunch1 and nbunch2 that are not in G are ignored. nbunch1 and nbunch2 are usually meant to be disjoint, but in the interest of speed and generality, that is not required here. 
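Examples
--------
A minimal sketch (a 4-node path; the edge boundary of the first two nodes):

    >>> G = nx.path_graph(4)
    >>> nx.edge_boundary(G, [0, 1])
    [(1, 2)]
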
""" if nbunch2 is None: # Then nbunch2 is complement of nbunch1 nset1=set((n for n in nbunch1 if n in G)) return [(n1,n2) for n1 in nset1 for n2 in G[n1] \ if n2 not in nset1] nset2=set(nbunch2) return [(n1,n2) for n1 in nbunch1 if n1 in G for n2 in G[n1] \ if n2 in nset2] def node_boundary(G, nbunch1, nbunch2=None): """Return the node boundary. The node boundary is all nodes in the edge boundary of a given set of nodes that are in the set. Parameters ----------- G : graph A networkx graph nbunch1 : list, container Interior node set nbunch2 : list, container Exterior node set. If None then it is set to all of the nodes in G not in nbunch1. Returns ------- nlist : list List of nodes. Notes ------ Nodes in nbunch1 and nbunch2 that are not in G are ignored. nbunch1 and nbunch2 are usually meant to be disjoint, but in the interest of speed and generality, that is not required here. """ nset1=set(n for n in nbunch1 if n in G) bdy=set() for n1 in nset1: bdy.update(G[n1]) bdy -= nset1 if nbunch2 is not None: # else nbunch2 is complement of nbunch1 bdy &= set(nbunch2) return list(bdy) networkx-1.8.1/networkx/algorithms/components/0000775000175000017500000000000012177457361021516 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/components/weakly_connected.py0000664000175000017500000000711612177456333025411 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Weakly connected components. """ __authors__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)' 'Christopher Ellison']) # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['number_weakly_connected_components', 'weakly_connected_components', 'weakly_connected_component_subgraphs', 'is_weakly_connected' ] import networkx as nx def weakly_connected_components(G): """Return weakly connected components of G. """ if not G.is_directed(): raise nx.NetworkXError("""Not allowed for undirected graph G. Use connected_components() """) seen={} components=[] for v in G: if v not in seen: c=_single_source_shortest_unipath_length(G,v) components.append(list(c.keys())) seen.update(c) components.sort(key=len,reverse=True) return components def number_weakly_connected_components(G): """Return the number of connected components in G. For directed graphs only. """ return len(weakly_connected_components(G)) def weakly_connected_component_subgraphs(G): """Return weakly connected components as subgraphs. Graph, node, and edge attributes are copied to the subgraphs. """ wcc=weakly_connected_components(G) graph_list=[] for c in wcc: graph_list.append(G.subgraph(c).copy()) return graph_list def is_weakly_connected(G): """Test directed graph for weak connectivity. Parameters ---------- G : NetworkX Graph A directed graph. Returns ------- connected : bool True if the graph is weakly connected, False otherwise. See Also -------- strongly_connected_components Notes ----- For directed graphs only. """ if not G.is_directed(): raise nx.NetworkXError("""Not allowed for undirected graph G. See is_connected() for connectivity test.""") if len(G)==0: raise nx.NetworkXPointlessConcept( """Connectivity is undefined for the null graph.""") return len(weakly_connected_components(G)[0])==len(G) def _single_source_shortest_unipath_length(G,source,cutoff=None): """Compute the shortest path lengths from source to all reachable nodes. The direction of the edge between nodes is ignored. For directed graphs only. 
Parameters ---------- G : NetworkX graph source : node Starting node for path cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- lengths : dictionary Dictionary of shortest path lengths keyed by target. """ # namespace speedups Gsucc = G.succ Gpred = G.pred seen={} # level (number of hops) when seen in BFS level=0 # the current level nextlevel = set([source]) # set of nodes to check at next level while nextlevel: thislevel=nextlevel # advance to next level nextlevel = set() # and start a new list (fringe) for v in thislevel: if v not in seen: seen[v]=level # set the level of vertex v nextlevel.update(Gsucc[v]) # add successors of v nextlevel.update(Gpred[v]) # add predecessors of v if (cutoff is not None and cutoff <= level): break level=level+1 return seen # return all path lengths as dictionary networkx-1.8.1/networkx/algorithms/components/__init__.py0000664000175000017500000000044612177456333023631 0ustar aricaric00000000000000from networkx.algorithms.components.connected import * from networkx.algorithms.components.strongly_connected import * from networkx.algorithms.components.weakly_connected import * from networkx.algorithms.components.attracting import * from networkx.algorithms.components.biconnected import * networkx-1.8.1/networkx/algorithms/components/connected.py0000664000175000017500000001042412177456333024031 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Connected components. """ __authors__ = "\n".join(['Eben Kenah', 'Aric Hagberg (hagberg@lanl.gov)' 'Christopher Ellison']) # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['number_connected_components', 'connected_components', 'connected_component_subgraphs', 'is_connected', 'node_connected_component', ] import networkx as nx def connected_components(G): """Return nodes in connected components of graph. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- comp : list of lists A list of nodes for each component of G. See Also -------- strongly_connected_components Notes ----- The list is ordered from largest connected component to smallest. For undirected graphs only. """ if G.is_directed(): raise nx.NetworkXError("""Not allowed for directed graph G. Use UG=G.to_undirected() to create an undirected graph.""") seen={} components=[] for v in G: if v not in seen: c=nx.single_source_shortest_path_length(G,v) components.append(list(c.keys())) seen.update(c) components.sort(key=len,reverse=True) return components def number_connected_components(G): """Return number of connected components in graph. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- n : integer Number of connected components See Also -------- connected_components Notes ----- For undirected graphs only. """ return len(connected_components(G)) def is_connected(G): """Test graph connectivity. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- connected : bool True if the graph is connected, false otherwise. Examples -------- >>> G=nx.path_graph(4) >>> print(nx.is_connected(G)) True See Also -------- connected_components Notes ----- For undirected graphs only. """ if G.is_directed(): raise nx.NetworkXError(\ """Not allowed for directed graph G. 
Use UG=G.to_undirected() to create an undirected graph.""") if len(G)==0: raise nx.NetworkXPointlessConcept( """Connectivity is undefined for the null graph.""") return len(nx.single_source_shortest_path_length(G, next(G.nodes_iter())))==len(G) def connected_component_subgraphs(G): """Return connected components as subgraphs. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- glist : list A list of graphs, one for each connected component of G. Examples -------- Get largest connected component as subgraph >>> G=nx.path_graph(4) >>> G.add_edge(5,6) >>> H=nx.connected_component_subgraphs(G)[0] See Also -------- connected_components Notes ----- The list is ordered from largest connected component to smallest. For undirected graphs only. Graph, node, and edge attributes are copied to the subgraphs. """ cc=connected_components(G) graph_list=[] for c in cc: graph_list.append(G.subgraph(c).copy()) return graph_list def node_connected_component(G,n): """Return nodes in connected components of graph containing node n. Parameters ---------- G : NetworkX Graph An undirected graph. n : node label A node in G Returns ------- comp : lists A list of nodes in component of G containing node n. See Also -------- connected_components Notes ----- For undirected graphs only. """ if G.is_directed(): raise nx.NetworkXError("""Not allowed for directed graph G. Use UG=G.to_undirected() to create an undirected graph.""") return list(nx.single_source_shortest_path_length(G,n).keys()) networkx-1.8.1/networkx/algorithms/components/biconnected.py0000664000175000017500000003417612177456333024356 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Biconnected components and articulation points. """ # Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from itertools import chain import networkx as nx __author__ = '\n'.join(['Jordi Torrents ', 'Dan Schult ', 'Aric Hagberg ']) __all__ = ['biconnected_components', 'biconnected_component_edges', 'biconnected_component_subgraphs', 'is_biconnected', 'articulation_points', ] def is_biconnected(G): """Return True if the graph is biconnected, False otherwise. A graph is biconnected if, and only if, it cannot be disconnected by removing only one node (and all edges incident on that node). If removing a node increases the number of disconnected components in the graph, that node is called an articulation point, or cut vertex. A biconnected graph has no articulation points. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- biconnected : bool True if the graph is biconnected, False otherwise. Raises ------ NetworkXError : If the input graph is not undirected. Examples -------- >>> G=nx.path_graph(4) >>> print(nx.is_biconnected(G)) False >>> G.add_edge(0,3) >>> print(nx.is_biconnected(G)) True See Also -------- biconnected_components, articulation_points, biconnected_component_edges, biconnected_component_subgraphs Notes ----- The algorithm to find articulation points and biconnected components is implemented using a non-recursive depth-first-search (DFS) that keeps track of the highest level that back edges reach in the DFS tree. A node `n` is an articulation point if, and only if, there exists a subtree rooted at `n` such that there is no back edge from any successor of `n` that links to a predecessor of `n` in the DFS tree. 
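For instance, the middle node of a 3-node path is an articulation point,
while a triangle has none (illustrative):

    >>> nx.is_biconnected(nx.path_graph(3))
    False
    >>> nx.is_biconnected(nx.cycle_graph(3))
    True
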
By keeping track of all the edges traversed by the DFS we can obtain the biconnected components because all edges of a bicomponent will be traversed consecutively between articulation points. References ---------- .. [1] Hopcroft, J.; Tarjan, R. (1973). "Efficient algorithms for graph manipulation". Communications of the ACM 16: 372–378. doi:10.1145/362248.362272 """ bcc = list(biconnected_components(G)) if not bcc: # No bicomponents (it could be an empty graph) return False return len(bcc[0]) == len(G) def biconnected_component_edges(G): """Return a generator of lists of edges, one list for each biconnected component of the input graph. Biconnected components are maximal subgraphs such that the removal of a node (and all edges incident on that node) will not disconnect the subgraph. Note that nodes may be part of more than one biconnected component. Those nodes are articulation points, or cut vertices. However, each edge belongs to one, and only one, biconnected component. Notice that by convention a dyad is considered a biconnected component. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- edges : generator Generator of lists of edges, one list for each bicomponent. Raises ------ NetworkXError : If the input graph is not undirected. Examples -------- >>> G = nx.barbell_graph(4,2) >>> print(nx.is_biconnected(G)) False >>> components = nx.biconnected_component_edges(G) >>> G.add_edge(2,8) >>> print(nx.is_biconnected(G)) True >>> components = nx.biconnected_component_edges(G) See Also -------- is_biconnected, biconnected_components, articulation_points, biconnected_component_subgraphs Notes ----- The algorithm to find articulation points and biconnected components is implemented using a non-recursive depth-first-search (DFS) that keeps track of the highest level that back edges reach in the DFS tree. A node `n` is an articulation point if, and only if, there exists a subtree rooted at `n` such that there is no back edge from any successor of `n` that links to a predecessor of `n` in the DFS tree. By keeping track of all the edges traversed by the DFS we can obtain the biconnected components because all edges of a bicomponent will be traversed consecutively between articulation points. References ---------- .. [1] Hopcroft, J.; Tarjan, R. (1973). "Efficient algorithms for graph manipulation". Communications of the ACM 16: 372–378. doi:10.1145/362248.362272 """ return sorted(_biconnected_dfs(G,components=True), key=len, reverse=True) def biconnected_components(G): """Return a generator of sets of nodes, one set for each biconnected component of the graph Biconnected components are maximal subgraphs such that the removal of a node (and all edges incident on that node) will not disconnect the subgraph. Note that nodes may be part of more than one biconnected component. Those nodes are articulation points, or cut vertices. The removal of articulation points will increase the number of connected components of the graph. Notice that by convention a dyad is considered a biconnected component. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- nodes : generator Generator of sets of nodes, one set for each biconnected component. Raises ------ NetworkXError : If the input graph is not undirected. 
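For instance (a sketch; two triangles joined by a single bridge edge):

    >>> G = nx.barbell_graph(3, 0)
    >>> sorted(map(sorted, nx.biconnected_components(G)))
    [[0, 1, 2], [2, 3], [3, 4, 5]]
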
Examples -------- >>> G = nx.barbell_graph(4,2) >>> print(nx.is_biconnected(G)) False >>> components = nx.biconnected_components(G) >>> G.add_edge(2,8) >>> print(nx.is_biconnected(G)) True >>> components = nx.biconnected_components(G) See Also -------- is_biconnected, articulation_points, biconnected_component_edges, biconnected_component_subgraphs Notes ----- The algorithm to find articulation points and biconnected components is implemented using a non-recursive depth-first-search (DFS) that keeps track of the highest level that back edges reach in the DFS tree. A node `n` is an articulation point if, and only if, there exists a subtree rooted at `n` such that there is no back edge from any successor of `n` that links to a predecessor of `n` in the DFS tree. By keeping track of all the edges traversed by the DFS we can obtain the biconnected components because all edges of a bicomponent will be traversed consecutively between articulation points. References ---------- .. [1] Hopcroft, J.; Tarjan, R. (1973). "Efficient algorithms for graph manipulation". Communications of the ACM 16: 372–378. doi:10.1145/362248.362272 """ bicomponents = (set(chain.from_iterable(comp)) for comp in _biconnected_dfs(G,components=True)) return sorted(bicomponents, key=len, reverse=True) def biconnected_component_subgraphs(G): """Return a generator of graphs, one graph for each biconnected component of the input graph. Biconnected components are maximal subgraphs such that the removal of a node (and all edges incident on that node) will not disconnect the subgraph. Note that nodes may be part of more than one biconnected component. Those nodes are articulation points, or cut vertices. The removal of articulation points will increase the number of connected components of the graph. Notice that by convention a dyad is considered a biconnected component. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- graphs : generator Generator of graphs, one graph for each biconnected component. Raises ------ NetworkXError : If the input graph is not undirected. Examples -------- >>> G = nx.barbell_graph(4,2) >>> print(nx.is_biconnected(G)) False >>> subgraphs = nx.biconnected_component_subgraphs(G) See Also -------- is_biconnected, articulation_points, biconnected_component_edges, biconnected_components Notes ----- The algorithm to find articulation points and biconnected components is implemented using a non-recursive depth-first-search (DFS) that keeps track of the highest level that back edges reach in the DFS tree. A node `n` is an articulation point if, and only if, there exists a subtree rooted at `n` such that there is no back edge from any successor of `n` that links to a predecessor of `n` in the DFS tree. By keeping track of all the edges traversed by the DFS we can obtain the biconnected components because all edges of a bicomponent will be traversed consecutively between articulation points. Graph, node, and edge attributes are copied to the subgraphs. References ---------- .. [1] Hopcroft, J.; Tarjan, R. (1973). "Efficient algorithms for graph manipulation". Communications of the ACM 16: 372–378. 
doi:10.1145/362248.362272 """ def edge_subgraph(G,edges): # create new graph and copy subgraph into it H = G.__class__() for u,v in edges: H.add_edge(u,v,attr_dict=G[u][v]) for n in H: H.node[n]=G.node[n].copy() H.graph=G.graph.copy() return H return (edge_subgraph(G,edges) for edges in sorted(_biconnected_dfs(G,components=True), key=len, reverse=True)) def articulation_points(G): """Return a generator of articulation points, or cut vertices, of a graph. An articulation point or cut vertex is any node whose removal (along with all its incident edges) increases the number of connected components of a graph. An undirected connected graph without articulation points is biconnected. Articulation points belong to more than one biconnected component of a graph. Notice that by convention a dyad is considered a biconnected component. Parameters ---------- G : NetworkX Graph An undirected graph. Returns ------- articulation points : generator generator of nodes Raises ------ NetworkXError : If the input graph is not undirected. Examples -------- >>> G = nx.barbell_graph(4,2) >>> print(nx.is_biconnected(G)) False >>> list(nx.articulation_points(G)) [6, 5, 4, 3] >>> G.add_edge(2,8) >>> print(nx.is_biconnected(G)) True >>> list(nx.articulation_points(G)) [] See Also -------- is_biconnected, biconnected_components, biconnected_component_edges, biconnected_component_subgraphs Notes ----- The algorithm to find articulation points and biconnected components is implemented using a non-recursive depth-first-search (DFS) that keeps track of the highest level that back edges reach in the DFS tree. A node `n` is an articulation point if, and only if, there exists a subtree rooted at `n` such that there is no back edge from any successor of `n` that links to a predecessor of `n` in the DFS tree. By keeping track of all the edges traversed by the DFS we can obtain the biconnected components because all edges of a bicomponent will be traversed consecutively between articulation points. References ---------- .. [1] Hopcroft, J.; Tarjan, R. (1973). "Efficient algorithms for graph manipulation". Communications of the ACM 16: 372–378. doi:10.1145/362248.362272 """ return _biconnected_dfs(G,components=False) def _biconnected_dfs(G, components=True): # depth-first search algorithm to generate articulation points # and biconnected components if G.is_directed(): raise nx.NetworkXError('Not allowed for directed graph G. 
' 'Use UG=G.to_undirected() to create an ' 'undirected graph.') visited = set() for start in G: if start in visited: continue discovery = {start:0} # "time" of first discovery of node during search low = {start:0} root_children = 0 visited.add(start) edge_stack = [] stack = [(start, start, iter(G[start]))] while stack: grandparent, parent, children = stack[-1] try: child = next(children) if grandparent == child: continue if child in visited: if discovery[child] <= discovery[parent]: # back edge low[parent] = min(low[parent],discovery[child]) if components: edge_stack.append((parent,child)) else: low[child] = discovery[child] = len(discovery) visited.add(child) stack.append((parent, child, iter(G[child]))) if components: edge_stack.append((parent,child)) except StopIteration: stack.pop() if len(stack) > 1: if low[parent] >= discovery[grandparent]: if components: ind = edge_stack.index((grandparent,parent)) yield edge_stack[ind:] edge_stack=edge_stack[:ind] else: yield grandparent low[grandparent] = min(low[parent], low[grandparent]) elif stack: # length 1 so grandparent is root root_children += 1 if components: ind = edge_stack.index((grandparent,parent)) yield edge_stack[ind:] if not components: # root node is articulation point if it has more than 1 child if root_children > 1: yield start networkx-1.8.1/networkx/algorithms/components/tests/0000775000175000017500000000000012177457361022660 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/components/tests/test_strongly_connected.py0000664000175000017500000001117712177456333030201 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx import NetworkXError class TestStronglyConnected: def setUp(self): self.gc=[] G=nx.DiGraph() G.add_edges_from([(1,2),(2,3),(2,8),(3,4),(3,7), (4,5),(5,3),(5,6),(7,4),(7,6),(8,1),(8,7)]) C=[[3, 4, 5, 7], [1, 2, 8], [6]] self.gc.append((G,C)) G= nx.DiGraph() G.add_edges_from([(1,2),(1,3),(1,4),(4,2),(3,4),(2,3)]) C = [[2, 3, 4],[1]] self.gc.append((G,C)) G = nx.DiGraph() G.add_edges_from([(1,2),(2,3),(3,2),(2,1)]) C = [[1, 2, 3]] self.gc.append((G,C)) # Eppstein's tests G = nx.DiGraph({ 0:[1],1:[2,3],2:[4,5],3:[4,5],4:[6],5:[],6:[]}) C = [[0],[1],[2],[3],[4],[5],[6]] self.gc.append((G,C)) G = nx.DiGraph({0:[1],1:[2,3,4],2:[0,3],3:[4],4:[3]}) C = [[0,1,2],[3,4]] self.gc.append((G,C)) def test_tarjan(self): scc=nx.strongly_connected_components for G,C in self.gc: assert_equal(sorted([sorted(g) for g in scc(G)]),sorted(C)) def test_tarjan_recursive(self): scc=nx.strongly_connected_components_recursive for G,C in self.gc: assert_equal(sorted([sorted(g) for g in scc(G)]),sorted(C)) def test_kosaraju(self): scc=nx.kosaraju_strongly_connected_components for G,C in self.gc: assert_equal(sorted([sorted(g) for g in scc(G)]),sorted(C)) def test_number_strongly_connected_components(self): ncc=nx.number_strongly_connected_components for G,C in self.gc: assert_equal(ncc(G),len(C)) def test_is_strongly_connected(self): for G,C in self.gc: if len(C)==1: assert_true(nx.is_strongly_connected(G)) else: assert_false(nx.is_strongly_connected(G)) def test_strongly_connected_component_subgraphs(self): scc=nx.strongly_connected_component_subgraphs for G,C in self.gc: assert_equal(sorted([sorted(g.nodes()) for g in scc(G)]),sorted(C)) G,C=self.gc[0] G.add_edge(1,2,eattr='red') G.node[1]['nattr']='blue' G.graph['gattr']='green' sgs=scc(G)[1] assert_equal(sgs[1][2]['eattr'],'red') assert_equal(sgs.node[1]['nattr'],'blue') assert_equal(sgs.graph['gattr'],'green') 
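        # The subgraph is a copy: changing the edge attribute on the subgraph
        # below must leave the attribute on the original graph G untouched.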
sgs[1][2]['eattr']='blue' assert_equal(G[1][2]['eattr'],'red') assert_equal(sgs[1][2]['eattr'],'blue') def test_contract_scc1(self): G = nx.DiGraph() G.add_edges_from([(1,2),(2,3),(2,11),(2,12),(3,4),(4,3),(4,5), (5,6),(6,5),(6,7),(7,8),(7,9),(7,10),(8,9), (9,7),(10,6),(11,2),(11,4),(11,6),(12,6),(12,11)]) scc = nx.strongly_connected_components(G) cG = nx.condensation(G, scc) # DAG assert_true(nx.is_directed_acyclic_graph(cG)) # # nodes assert_equal(sorted(cG.nodes()),[0,1,2,3]) # # edges mapping={} for i,component in enumerate(scc): for n in component: mapping[n] = i edge=(mapping[2],mapping[3]) assert_true(cG.has_edge(*edge)) edge=(mapping[2],mapping[5]) assert_true(cG.has_edge(*edge)) edge=(mapping[3],mapping[5]) assert_true(cG.has_edge(*edge)) def test_contract_scc_isolate(self): # Bug found and fixed in [1687]. G = nx.DiGraph() G.add_edge(1,2) G.add_edge(2,1) scc = nx.strongly_connected_components(G) cG = nx.condensation(G, scc) assert_equal(cG.nodes(),[0]) assert_equal(cG.edges(),[]) def test_contract_scc_edge(self): G = nx.DiGraph() G.add_edge(1,2) G.add_edge(2,1) G.add_edge(2,3) G.add_edge(3,4) G.add_edge(4,3) scc = nx.strongly_connected_components(G) cG = nx.condensation(G, scc) assert_equal(cG.nodes(),[0,1]) if 1 in scc[0]: edge = (0,1) else: edge = (1,0) assert_equal(cG.edges(),[edge]) def test_connected_raise(self): G=nx.Graph() assert_raises(NetworkXError,nx.strongly_connected_components,G) assert_raises(NetworkXError,nx.kosaraju_strongly_connected_components,G) assert_raises(NetworkXError,nx.strongly_connected_components_recursive,G) assert_raises(NetworkXError,nx.strongly_connected_component_subgraphs,G) assert_raises(NetworkXError,nx.is_strongly_connected,G) assert_raises(NetworkXError,nx.condensation,G) networkx-1.8.1/networkx/algorithms/components/tests/test_attracting.py0000664000175000017500000000451112177456333026430 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestAttractingComponents(object): def setUp(self): self.G1 = nx.DiGraph() self.G1.add_edges_from([(5,11),(11,2),(11,9),(11,10), (7,11),(7,8),(8,9),(3,8),(3,10)]) self.G2 = nx.DiGraph() self.G2.add_edges_from([(0,1),(0,2),(1,1),(1,2),(2,1)]) self.G3 = nx.DiGraph() self.G3.add_edges_from([(0,1),(1,2),(2,1),(0,3),(3,4),(4,3)]) def test_attracting_components(self): ac = nx.attracting_components(self.G1) assert_true([2] in ac) assert_true([9] in ac) assert_true([10] in ac) ac = nx.attracting_components(self.G2) ac = [tuple(sorted(x)) for x in ac] assert_true(ac == [(1,2)]) ac = nx.attracting_components(self.G3) ac = [tuple(sorted(x)) for x in ac] assert_true((1,2) in ac) assert_true((3,4) in ac) assert_equal(len(ac), 2) def test_number_attacting_components(self): assert_equal(len(nx.attracting_components(self.G1)), 3) assert_equal(len(nx.attracting_components(self.G2)), 1) assert_equal(len(nx.attracting_components(self.G3)), 2) def test_is_attracting_component(self): assert_false(nx.is_attracting_component(self.G1)) assert_false(nx.is_attracting_component(self.G2)) assert_false(nx.is_attracting_component(self.G3)) g2 = self.G3.subgraph([1,2]) assert_true(nx.is_attracting_component(g2)) def test_attracting_component_subgraphs(self): subgraphs = nx.attracting_component_subgraphs(self.G1) for subgraph in subgraphs: assert_equal(len(subgraph), 1) self.G2.add_edge(1,2,eattr='red') # test attrs copied to subgraphs self.G2.node[2]['nattr']='blue' self.G2.graph['gattr']='green' subgraphs = nx.attracting_component_subgraphs(self.G2) assert_equal(len(subgraphs), 1) 
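        # G2 has exactly one attracting component, {1, 2}; check its membership
        # and that the graph, node and edge attributes were copied into it.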
SG2=subgraphs[0] assert_true(1 in SG2) assert_true(2 in SG2) assert_equal(SG2[1][2]['eattr'],'red') assert_equal(SG2.node[2]['nattr'],'blue') assert_equal(SG2.graph['gattr'],'green') SG2.add_edge(1,2,eattr='blue') assert_equal(SG2[1][2]['eattr'],'blue') assert_equal(self.G2[1][2]['eattr'],'red') networkx-1.8.1/networkx/algorithms/components/tests/test_weakly_connected.py0000664000175000017500000000552212177456333027611 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx import NetworkXError class TestWeaklyConnected: def setUp(self): self.gc=[] G=nx.DiGraph() G.add_edges_from([(1,2),(2,3),(2,8),(3,4),(3,7), (4,5),(5,3),(5,6),(7,4),(7,6),(8,1),(8,7)]) C=[[3, 4, 5, 7], [1, 2, 8], [6]] self.gc.append((G,C)) G= nx.DiGraph() G.add_edges_from([(1,2),(1,3),(1,4),(4,2),(3,4),(2,3)]) C = [[2, 3, 4],[1]] self.gc.append((G,C)) G = nx.DiGraph() G.add_edges_from([(1,2),(2,3),(3,2),(2,1)]) C = [[1, 2, 3]] self.gc.append((G,C)) # Eppstein's tests G = nx.DiGraph({ 0:[1],1:[2,3],2:[4,5],3:[4,5],4:[6],5:[],6:[]}) C = [[0],[1],[2],[3],[4],[5],[6]] self.gc.append((G,C)) G = nx.DiGraph({0:[1],1:[2,3,4],2:[0,3],3:[4],4:[3]}) C = [[0,1,2],[3,4]] self.gc.append((G,C)) def test_weakly_connected_components(self): wcc=nx.weakly_connected_components cc=nx.connected_components for G,C in self.gc: U=G.to_undirected() w=sorted([sorted(g) for g in wcc(G)]) c=sorted([sorted(g) for g in cc(U)]) assert_equal(w,c) def test_number_weakly_connected_components(self): wcc=nx.number_weakly_connected_components cc=nx.number_connected_components for G,C in self.gc: U=G.to_undirected() w=wcc(G) c=cc(U) assert_equal(w,c) def test_weakly_connected_component_subgraphs(self): wcc=nx.weakly_connected_component_subgraphs cc=nx.connected_component_subgraphs for G,C in self.gc: U=G.to_undirected() w=sorted([sorted(g.nodes()) for g in wcc(G)]) c=sorted([sorted(g.nodes()) for g in cc(U)]) assert_equal(w,c) G,C=self.gc[0] G.add_edge(1,2,eattr='red') G.node[1]['nattr']='blue' G.graph['gattr']='green' sgs=wcc(G)[0] assert_equal(sgs[1][2]['eattr'],'red') assert_equal(sgs.node[1]['nattr'],'blue') assert_equal(sgs.graph['gattr'],'green') sgs[1][2]['eattr']='blue' assert_equal(G[1][2]['eattr'],'red') assert_equal(sgs[1][2]['eattr'],'blue') def test_is_weakly_connected(self): wcc=nx.is_weakly_connected cc=nx.is_connected for G,C in self.gc: U=G.to_undirected() assert_equal(wcc(G),cc(U)) def test_connected_raise(self): G=nx.Graph() assert_raises(NetworkXError,nx.weakly_connected_components,G) assert_raises(NetworkXError,nx.number_weakly_connected_components,G) assert_raises(NetworkXError,nx.weakly_connected_component_subgraphs,G) assert_raises(NetworkXError,nx.is_weakly_connected,G) networkx-1.8.1/networkx/algorithms/components/tests/test_biconnected.py0000664000175000017500000001403012177456333026542 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx.algorithms.components import biconnected def assert_components_equal(x,y): sx = set((frozenset([frozenset(e) for e in c]) for c in x)) sy = set((frozenset([frozenset(e) for e in c]) for c in y)) assert_equal(sx,sy) def test_barbell(): G=nx.barbell_graph(8,4) G.add_path([7,20,21,22]) G.add_cycle([22,23,24,25]) pts=set(biconnected.articulation_points(G)) assert_equal(pts,set([7,8,9,10,11,12,20,21,22])) answer = [set([12, 13, 14, 15, 16, 17, 18, 19]), set([0, 1, 2, 3, 4, 5, 6, 7]), set([22, 23, 24, 25]), set([11, 12]), set([10, 11]), set([9, 10]), set([8, 9]), set([7, 8]), set([21, 22]), 
set([20, 21]), set([7, 20])] bcc=list(biconnected.biconnected_components(G)) bcc.sort(key=len, reverse=True) assert_equal(bcc,answer) G.add_edge(2,17) pts=set(biconnected.articulation_points(G)) assert_equal(pts,set([7,20,21,22])) def test_articulation_points_cycle(): G=nx.cycle_graph(3) G.add_cycle([1,3,4]) pts=set(biconnected.articulation_points(G)) assert_equal(pts,set([1])) def test_is_biconnected(): G=nx.cycle_graph(3) assert_true(biconnected.is_biconnected(G)) G.add_cycle([1,3,4]) assert_false(biconnected.is_biconnected(G)) def test_empty_is_biconnected(): G=nx.empty_graph(5) assert_false(biconnected.is_biconnected(G)) G.add_edge(0,1) assert_false(biconnected.is_biconnected(G)) def test_biconnected_components_cycle(): G=nx.cycle_graph(3) G.add_cycle([1,3,4]) pts = set(map(frozenset,biconnected.biconnected_components(G))) assert_equal(pts,set([frozenset([0,1,2]),frozenset([1,3,4])])) def test_biconnected_component_subgraphs_cycle(): G=nx.cycle_graph(3) G.add_cycle([1,3,4,5]) G.add_edge(1,3,eattr='red') # test copying of edge data G.node[1]['nattr']='blue' G.graph['gattr']='green' Gc = set(biconnected.biconnected_component_subgraphs(G)) assert_equal(len(Gc),2) g1,g2=Gc if 0 in g1: assert_true(nx.is_isomorphic(g1,nx.Graph([(0,1),(0,2),(1,2)]))) assert_true(nx.is_isomorphic(g2,nx.Graph([(1,3),(1,5),(3,4),(4,5)]))) assert_equal(g2[1][3]['eattr'],'red') assert_equal(g2.node[1]['nattr'],'blue') assert_equal(g2.graph['gattr'],'green') g2[1][3]['eattr']='blue' assert_equal(g2[1][3]['eattr'],'blue') assert_equal(G[1][3]['eattr'],'red') else: assert_true(nx.is_isomorphic(g1,nx.Graph([(1,3),(1,5),(3,4),(4,5)]))) assert_true(nx.is_isomorphic(g2,nx.Graph([(0,1),(0,2),(1,2)]))) assert_equal(g1[1][3]['eattr'],'red') assert_equal(g1.node[1]['nattr'],'blue') assert_equal(g1.graph['gattr'],'green') g1[1][3]['eattr']='blue' assert_equal(g1[1][3]['eattr'],'blue') assert_equal(G[1][3]['eattr'],'red') def test_biconnected_components1(): # graph example from # http://www.ibluemojo.com/school/articul_algorithm.html edges=[(0,1), (0,5), (0,6), (0,14), (1,5), (1,6), (1,14), (2,4), (2,10), (3,4), (3,15), (4,6), (4,7), (4,10), (5,14), (6,14), (7,9), (8,9), (8,12), (8,13), (10,15), (11,12), (11,13), (12,13)] G=nx.Graph(edges) pts = set(biconnected.articulation_points(G)) assert_equal(pts,set([4,6,7,8,9])) comps = list(biconnected.biconnected_component_edges(G)) answer = [ [(3,4),(15,3),(10,15),(10,4),(2,10),(4,2)], [(13,12),(13,8),(11,13),(12,11),(8,12)], [(9,8)], [(7,9)], [(4,7)], [(6,4)], [(14,0),(5,1),(5,0),(14,5),(14,1),(6,14),(6,0),(1,6),(0,1)], ] assert_components_equal(comps,answer) def test_biconnected_components2(): G=nx.Graph() G.add_cycle('ABC') G.add_cycle('CDE') G.add_cycle('FIJHG') G.add_cycle('GIJ') G.add_edge('E','G') comps = list(biconnected.biconnected_component_edges(G)) answer = [ [tuple('GF'),tuple('FI'),tuple('IG'),tuple('IJ'),tuple('JG'),tuple('JH'),tuple('HG')], [tuple('EG')], [tuple('CD'),tuple('DE'),tuple('CE')], [tuple('AB'),tuple('BC'),tuple('AC')] ] assert_components_equal(comps,answer) def test_biconnected_davis(): D = nx.davis_southern_women_graph() bcc = list(biconnected.biconnected_components(D))[0] assert_true(set(D) == bcc) # All nodes in a giant bicomponent # So no articulation points assert_equal(list(biconnected.articulation_points(D)),[]) def test_biconnected_karate(): K = nx.karate_club_graph() answer = [set([0, 1, 2, 3, 7, 8, 9, 12, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]), set([0, 4, 5, 6, 10, 16]), set([0, 11])] bcc = 
list(biconnected.biconnected_components(K)) bcc.sort(key=len, reverse=True) assert_true(list(biconnected.biconnected_components(K)) == answer) assert_equal(list(biconnected.articulation_points(K)),[0]) def test_biconnected_eppstein(): # tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py G1 = nx.Graph({ 0: [1,2,5], 1: [0,5], 2: [0,3,4], 3: [2,4,5,6], 4: [2,3,5,6], 5: [0,1,3,4], 6: [3,4]}) G2 = nx.Graph({ 0: [2,5], 1: [3,8], 2: [0,3,5], 3: [1,2,6,8], 4: [7], 5: [0,2], 6: [3,8], 7: [4], 8: [1,3,6]}) assert_true(biconnected.is_biconnected(G1)) assert_false(biconnected.is_biconnected(G2)) answer_G2 = [set([1, 3, 6, 8]), set([0, 2, 5]), set([2, 3]), set([4, 7])] bcc = list(biconnected.biconnected_components(G2)) bcc.sort(key=len, reverse=True) assert_equal(bcc, answer_G2) networkx-1.8.1/networkx/algorithms/components/tests/test_connected.py0000664000175000017500000000530012177456333026227 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx import convert_node_labels_to_integers as cnlti from networkx import NetworkXError class TestConnected: def setUp(self): G1=cnlti(nx.grid_2d_graph(2,2),first_label=0,ordering="sorted") G2=cnlti(nx.lollipop_graph(3,3),first_label=4,ordering="sorted") G3=cnlti(nx.house_graph(),first_label=10,ordering="sorted") self.G=nx.union(G1,G2) self.G=nx.union(self.G,G3) self.DG=nx.DiGraph([(1,2),(1,3),(2,3)]) self.grid=cnlti(nx.grid_2d_graph(4,4),first_label=1) def test_connected_components(self): cc=nx.connected_components G=self.G C=[[0, 1, 2, 3], [4, 5, 6, 7, 8, 9], [10, 11, 12, 13, 14]] assert_equal(sorted([sorted(g) for g in cc(G)]),sorted(C)) def test_number_connected_components(self): ncc=nx.number_connected_components assert_equal(ncc(self.G),3) def test_number_connected_components2(self): ncc=nx.number_connected_components assert_equal(ncc(self.grid),1) def test_connected_components2(self): cc=nx.connected_components G=self.grid C=[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]] assert_equal(sorted([sorted(g) for g in cc(G)]),sorted(C)) def test_node_connected_components(self): ncc=nx.node_connected_component G=self.grid C=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] assert_equal(sorted(ncc(G,1)),sorted(C)) def test_connected_component_subgraphs(self): G=self.grid G.add_edge(1,2,eattr='red') # test attributes copied to subgraphs G.node[1]['nattr']='blue' G.graph['gattr']='green' ccs=nx.connected_component_subgraphs(G) assert_equal(len(ccs),1) sg=ccs[0] assert_equal(sorted(sg.nodes()),list(range(1,17))) assert_equal(sg[1][2]['eattr'],'red') assert_equal(sg.node[1]['nattr'],'blue') assert_equal(sg.graph['gattr'],'green') sg[1][2]['eattr']='blue' assert_equal(G[1][2]['eattr'],'red') assert_equal(sg[1][2]['eattr'],'blue') def test_is_connected(self): assert_true(nx.is_connected(self.grid)) G=nx.Graph() G.add_nodes_from([1,2]) assert_false(nx.is_connected(G)) def test_connected_raise(self): assert_raises(NetworkXError,nx.connected_components,self.DG) assert_raises(NetworkXError,nx.number_connected_components,self.DG) assert_raises(NetworkXError,nx.connected_component_subgraphs,self.DG) assert_raises(NetworkXError,nx.node_connected_component,self.DG,1) assert_raises(NetworkXError,nx.is_connected,self.DG) networkx-1.8.1/networkx/algorithms/components/attracting.py0000664000175000017500000000632312177456333024232 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Attracting components. 
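An attracting component is a strongly connected component from which no
edge leads to a node outside the component.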
""" # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __authors__ = "\n".join(['Christopher Ellison']) __all__ = ['number_attracting_components', 'attracting_components', 'is_attracting_component', 'attracting_component_subgraphs', ] def attracting_components(G): """Returns a list of attracting components in `G`. An attracting component in a directed graph `G` is a strongly connected component with the property that a random walker on the graph will never leave the component, once it enters the component. The nodes in attracting components can also be thought of as recurrent nodes. If a random walker enters the attractor containing the node, then the node will be visited infinitely often. Parameters ---------- G : DiGraph, MultiDiGraph The graph to be analyzed. Returns ------- attractors : list The list of attracting components, sorted from largest attracting component to smallest attracting component. See Also -------- number_attracting_components is_attracting_component attracting_component_subgraphs """ scc = nx.strongly_connected_components(G) cG = nx.condensation(G, scc) attractors = [scc[n] for n in cG if cG.out_degree(n) == 0] attractors.sort(key=len,reverse=True) return attractors def number_attracting_components(G): """Returns the number of attracting components in `G`. Parameters ---------- G : DiGraph, MultiDiGraph The graph to be analyzed. Returns ------- n : int The number of attracting components in G. See Also -------- attracting_components is_attracting_component attracting_component_subgraphs """ n = len(attracting_components(G)) return n def is_attracting_component(G): """Returns True if `G` consists of a single attracting component. Parameters ---------- G : DiGraph, MultiDiGraph The graph to be analyzed. Returns ------- attracting : bool True if `G` has a single attracting component. Otherwise, False. See Also -------- attracting_components number_attracting_components attracting_component_subgraphs """ ac = attracting_components(G) if len(ac[0]) == len(G): attracting = True else: attracting = False return attracting def attracting_component_subgraphs(G): """Returns a list of attracting component subgraphs from `G`. Parameters ---------- G : DiGraph, MultiDiGraph The graph to be analyzed. Returns ------- subgraphs : list A list of node-induced subgraphs of the attracting components of `G`. Notes ----- Graph, node, and edge attributes are copied to the subgraphs. See Also -------- attracting_components number_attracting_components is_attracting_component """ subgraphs = [G.subgraph(ac).copy() for ac in attracting_components(G)] return subgraphs networkx-1.8.1/networkx/algorithms/components/strongly_connected.py0000664000175000017500000002303212177456333025771 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Strongly connected components. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __authors__ = "\n".join(['Eben Kenah', 'Aric Hagberg (hagberg@lanl.gov)' 'Christopher Ellison', 'Ben Edwards (bedwards@cs.unm.edu)']) __all__ = ['number_strongly_connected_components', 'strongly_connected_components', 'strongly_connected_component_subgraphs', 'is_strongly_connected', 'strongly_connected_components_recursive', 'kosaraju_strongly_connected_components', 'condensation'] def strongly_connected_components(G): """Return nodes in strongly connected components of graph. 
Parameters ---------- G : NetworkX Graph An directed graph. Returns ------- comp : list of lists A list of nodes for each component of G. The list is ordered from largest connected component to smallest. Raises ------ NetworkXError: If G is undirected. See Also -------- connected_components, weakly_connected_components Notes ----- Uses Tarjan's algorithm with Nuutila's modifications. Nonrecursive version of algorithm. References ---------- .. [1] Depth-first search and linear graph algorithms, R. Tarjan SIAM Journal of Computing 1(2):146-160, (1972). .. [2] On finding the strongly connected components in a directed graph. E. Nuutila and E. Soisalon-Soinen Information Processing Letters 49(1): 9-14, (1994).. """ if not G.is_directed(): raise nx.NetworkXError("""Not allowed for undirected graph G. Use connected_components() """) preorder={} lowlink={} scc_found={} scc_queue = [] scc_list=[] i=0 # Preorder counter for source in G: if source not in scc_found: queue=[source] while queue: v=queue[-1] if v not in preorder: i=i+1 preorder[v]=i done=1 v_nbrs=G[v] for w in v_nbrs: if w not in preorder: queue.append(w) done=0 break if done==1: lowlink[v]=preorder[v] for w in v_nbrs: if w not in scc_found: if preorder[w]>preorder[v]: lowlink[v]=min([lowlink[v],lowlink[w]]) else: lowlink[v]=min([lowlink[v],preorder[w]]) queue.pop() if lowlink[v]==preorder[v]: scc_found[v]=True scc=[v] while scc_queue and preorder[scc_queue[-1]]>preorder[v]: k=scc_queue.pop() scc_found[k]=True scc.append(k) scc_list.append(scc) else: scc_queue.append(v) scc_list.sort(key=len,reverse=True) return scc_list def kosaraju_strongly_connected_components(G,source=None): """Return nodes in strongly connected components of graph. Parameters ---------- G : NetworkX Graph An directed graph. Returns ------- comp : list of lists A list of nodes for each component of G. The list is ordered from largest connected component to smallest. Raises ------ NetworkXError: If G is undirected See Also -------- connected_components Notes ----- Uses Kosaraju's algorithm. """ if not G.is_directed(): raise nx.NetworkXError("""Not allowed for undirected graph G. Use connected_components() """) components=[] G=G.reverse(copy=False) post=list(nx.dfs_postorder_nodes(G,source=source)) G=G.reverse(copy=False) seen={} while post: r=post.pop() if r in seen: continue c=nx.dfs_preorder_nodes(G,r) new=[v for v in c if v not in seen] seen.update([(u,True) for u in new]) components.append(new) components.sort(key=len,reverse=True) return components def strongly_connected_components_recursive(G): """Return nodes in strongly connected components of graph. Recursive version of algorithm. Parameters ---------- G : NetworkX Graph An directed graph. Returns ------- comp : list of lists A list of nodes for each component of G. The list is ordered from largest connected component to smallest. Raises ------ NetworkXError : If G is undirected See Also -------- connected_components Notes ----- Uses Tarjan's algorithm with Nuutila's modifications. References ---------- .. [1] Depth-first search and linear graph algorithms, R. Tarjan SIAM Journal of Computing 1(2):146-160, (1972). .. [2] On finding the strongly connected components in a directed graph. E. Nuutila and E. Soisalon-Soinen Information Processing Letters 49(1): 9-14, (1994).. 
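    Examples
    --------
    A small illustration: nodes 0 and 1 lie on a directed cycle, so they end
    up in the same component, while node 2 forms a component of its own.

    >>> G = nx.DiGraph([(0, 1), (1, 0), (1, 2)])
    >>> [sorted(scc) for scc in nx.strongly_connected_components_recursive(G)]
    [[0, 1], [2]]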
""" def visit(v,cnt): root[v]=cnt visited[v]=cnt cnt+=1 stack.append(v) for w in G[v]: if w not in visited: visit(w,cnt) if w not in component: root[v]=min(root[v],root[w]) if root[v]==visited[v]: component[v]=root[v] tmpc=[v] # hold nodes in this component while stack[-1]!=v: w=stack.pop() component[w]=root[v] tmpc.append(w) stack.remove(v) scc.append(tmpc) # add to scc list if not G.is_directed(): raise nx.NetworkXError("""Not allowed for undirected graph G. Use connected_components() """) scc=[] visited={} component={} root={} cnt=0 stack=[] for source in G: if source not in visited: visit(source,cnt) scc.sort(key=len,reverse=True) return scc def strongly_connected_component_subgraphs(G): """Return strongly connected components as subgraphs. Parameters ---------- G : NetworkX Graph A graph. Returns ------- glist : list A list of graphs, one for each strongly connected component of G. See Also -------- connected_component_subgraphs Notes ----- The list is ordered from largest strongly connected component to smallest. Graph, node, and edge attributes are copied to the subgraphs. """ cc=strongly_connected_components(G) graph_list=[] for c in cc: graph_list.append(G.subgraph(c).copy()) return graph_list def number_strongly_connected_components(G): """Return number of strongly connected components in graph. Parameters ---------- G : NetworkX graph A directed graph. Returns ------- n : integer Number of strongly connected components See Also -------- connected_components Notes ----- For directed graphs only. """ return len(strongly_connected_components(G)) def is_strongly_connected(G): """Test directed graph for strong connectivity. Parameters ---------- G : NetworkX Graph A directed graph. Returns ------- connected : bool True if the graph is strongly connected, False otherwise. See Also -------- strongly_connected_components Notes ----- For directed graphs only. """ if not G.is_directed(): raise nx.NetworkXError("""Not allowed for undirected graph G. See is_connected() for connectivity test.""") if len(G)==0: raise nx.NetworkXPointlessConcept( """Connectivity is undefined for the null graph.""") return len(strongly_connected_components(G)[0])==len(G) def condensation(G, scc=None): """Returns the condensation of G. The condensation of G is the graph with each of the strongly connected components contracted into a single node. Parameters ---------- G : NetworkX DiGraph A directed graph. scc: list (optional, default=None) A list of strongly connected components. If provided, the elements in `scc` must partition the nodes in `G`. If not provided, it will be calculated as scc=nx.strongly_connected_components(G). Returns ------- C : NetworkX DiGraph The condensation of G. The node labels are integers corresponding to the index of the component in the list of strongly connected components. Raises ------ NetworkXError: If G is not directed Notes ----- After contracting all strongly connected components to a single node, the resulting graph is a directed acyclic graph. """ if not G.is_directed(): raise nx.NetworkXError("""Not allowed for undirected graph G. 
See is_connected() for connectivity test.""") if scc is None: scc = nx.strongly_connected_components(G) mapping = {} C = nx.DiGraph() for i,component in enumerate(scc): for n in component: mapping[n] = i C.add_nodes_from(range(len(scc))) for u,v in G.edges(): if mapping[u] != mapping[v]: C.add_edge(mapping[u],mapping[v]) return C networkx-1.8.1/networkx/algorithms/graphical.py0000664000175000017500000003127612177456333021644 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Test sequences for graphiness. """ # Copyright (C) 2004-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from collections import defaultdict import heapq import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult (dschult@colgate.edu)' 'Joel Miller (joel.c.miller.research@gmail.com)' 'Ben Edwards' 'Brian Cloteaux ']) __all__ = ['is_graphical', 'is_multigraphical', 'is_pseudographical', 'is_digraphical', 'is_valid_degree_sequence_erdos_gallai', 'is_valid_degree_sequence_havel_hakimi', 'is_valid_degree_sequence', # deprecated ] def is_graphical(sequence, method='eg'): """Returns True if sequence is a valid degree sequence. A degree sequence is valid if some graph can realize it. Parameters ---------- sequence : list or iterable container A sequence of integer node degrees method : "eg" | "hh" The method used to validate the degree sequence. "eg" corresponds to the ErdÅ‘s-Gallai algorithm, and "hh" to the Havel-Hakimi algorithm. Returns ------- valid : bool True if the sequence is a valid degree sequence and False if not. Examples -------- >>> G = nx.path_graph(4) >>> sequence = G.degree().values() >>> nx.is_valid_degree_sequence(sequence) True References ---------- ErdÅ‘s-Gallai [EG1960]_, [choudum1986]_ Havel-Hakimi [havel1955]_, [hakimi1962]_, [CL1996]_ """ if method == 'eg': valid = is_valid_degree_sequence_erdos_gallai(list(sequence)) elif method == 'hh': valid = is_valid_degree_sequence_havel_hakimi(list(sequence)) else: msg = "`method` must be 'eg' or 'hh'" raise nx.NetworkXException(msg) return valid is_valid_degree_sequence = is_graphical def _basic_graphical_tests(deg_sequence): # Sort and perform some simple tests on the sequence if not nx.utils.is_list_of_ints(deg_sequence): raise nx.NetworkXUnfeasible p = len(deg_sequence) num_degs = [0]*p dmax, dmin, dsum, n = 0, p, 0, 0 for d in deg_sequence: # Reject if degree is negative or larger than the sequence length if d<0 or d>=p: raise nx.NetworkXUnfeasible # Process only the non-zero integers elif d>0: dmax, dmin, dsum, n = max(dmax,d), min(dmin,d), dsum+d, n+1 num_degs[d] += 1 # Reject sequence if it has odd sum or is oversaturated if dsum%2 or dsum>n*(n-1): raise nx.NetworkXUnfeasible return dmax,dmin,dsum,n,num_degs def is_valid_degree_sequence_havel_hakimi(deg_sequence): r"""Returns True if deg_sequence can be realized by a simple graph. The validation proceeds using the Havel-Hakimi theorem. Worst-case run time is: O(s) where s is the sum of the sequence. Parameters ---------- deg_sequence : list A list of integers where each element specifies the degree of a node in a graph. Returns ------- valid : bool True if deg_sequence is graphical and False if not. Notes ----- The ZZ condition says that for the sequence d if .. math:: |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)} then d is graphical. This was shown in Theorem 6 in [1]_. References ---------- .. [1] I.E. Zverovich and V.E. Zverovich. 
"Contributions to the theory of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992). [havel1955]_, [hakimi1962]_, [CL1996]_ """ try: dmax,dmin,dsum,n,num_degs = _basic_graphical_tests(deg_sequence) except nx.NetworkXUnfeasible: return False # Accept if sequence has no non-zero degrees or passes the ZZ condition if n==0 or 4*dmin*n >= (dmax+dmin+1) * (dmax+dmin+1): return True modstubs = [0]*(dmax+1) # Successively reduce degree sequence by removing the maximum degree while n > 0: # Retrieve the maximum degree in the sequence while num_degs[dmax] == 0: dmax -= 1; # If there are not enough stubs to connect to, then the sequence is # not graphical if dmax > n-1: return False # Remove largest stub in list num_degs[dmax], n = num_degs[dmax]-1, n-1 # Reduce the next dmax largest stubs mslen = 0 k = dmax for i in range(dmax): while num_degs[k] == 0: k -= 1 num_degs[k], n = num_degs[k]-1, n-1 if k > 1: modstubs[mslen] = k-1 mslen += 1 # Add back to the list any non-zero stubs that were removed for i in range(mslen): stub = modstubs[i] num_degs[stub], n = num_degs[stub]+1, n+1 return True def is_valid_degree_sequence_erdos_gallai(deg_sequence): r"""Returns True if deg_sequence can be realized by a simple graph. The validation is done using the ErdÅ‘s-Gallai theorem [EG1960]_. Parameters ---------- deg_sequence : list A list of integers Returns ------- valid : bool True if deg_sequence is graphical and False if not. Notes ----- This implementation uses an equivalent form of the ErdÅ‘s-Gallai criterion. Worst-case run time is: O(n) where n is the length of the sequence. Specifically, a sequence d is graphical if and only if the sum of the sequence is even and for all strong indices k in the sequence, .. math:: \sum_{i=1}^{k} d_i \leq k(k-1) + \sum_{j=k+1}^{n} \min(d_i,k) = k(n-1) - ( k \sum_{j=0}^{k-1} n_j - \sum_{j=0}^{k-1} j n_j ) A strong index k is any index where `d_k \geq k` and the value `n_j` is the number of occurrences of j in d. The maximal strong index is called the Durfee index. This particular rearrangement comes from the proof of Theorem 3 in [2]_. The ZZ condition says that for the sequence d if .. math:: |d| >= \frac{(\max(d) + \min(d) + 1)^2}{4*\min(d)} then d is graphical. This was shown in Theorem 6 in [2]_. References ---------- .. [1] A. Tripathi and S. Vijay. "A note on a theorem of ErdÅ‘s & Gallai", Discrete Mathematics, 265, pp. 417-420 (2003). .. [2] I.E. Zverovich and V.E. Zverovich. "Contributions to the theory of graphic sequences", Discrete Mathematics, 105, pp. 292-303 (1992). [EG1960]_, [choudum1986]_ """ try: dmax,dmin,dsum,n,num_degs = _basic_graphical_tests(deg_sequence) except nx.NetworkXUnfeasible: return False # Accept if sequence has no non-zero degrees or passes the ZZ condition if n==0 or 4*dmin*n >= (dmax+dmin+1) * (dmax+dmin+1): return True # Perform the EG checks using the reformulation of Zverovich and Zverovich k, sum_deg, sum_nj, sum_jnj = 0, 0, 0, 0 for dk in range(dmax, dmin-1, -1): if dk < k+1: # Check if already past Durfee index return True if num_degs[dk] > 0: run_size = num_degs[dk] # Process a run of identical-valued degrees if dk < k+run_size: # Check if end of run is past Durfee index run_size = dk-k # Adjust back to Durfee index sum_deg += run_size * dk for v in range(run_size): sum_nj += num_degs[k+v] sum_jnj += (k+v) * num_degs[k+v] k += run_size if sum_deg > k*(n-1) - k*sum_nj + sum_jnj: return False return True def is_multigraphical(sequence): """Returns True if some multigraph can realize the sequence. 
Parameters ---------- deg_sequence : list A list of integers Returns ------- valid : bool True if deg_sequence is a multigraphic degree sequence and False if not. Notes ----- The worst-case run time is O(n) where n is the length of the sequence. References ---------- .. [1] S. L. Hakimi. "On the realizability of a set of integers as degrees of the vertices of a linear graph", J. SIAM, 10, pp. 496-506 (1962). """ deg_sequence = list(sequence) if not nx.utils.is_list_of_ints(deg_sequence): return False dsum, dmax = 0, 0 for d in deg_sequence: if d<0: return False dsum, dmax = dsum+d, max(dmax,d) if dsum%2 or dsum<2*dmax: return False return True def is_pseudographical(sequence): """Returns True if some pseudograph can realize the sequence. Every nonnegative integer sequence with an even sum is pseudographical (see [1]_). Parameters ---------- sequence : list or iterable container A sequence of integer node degrees Returns ------- valid : bool True if the sequence is a pseudographic degree sequence and False if not. Notes ----- The worst-case run time is O(n) where n is the length of the sequence. References ---------- .. [1] F. Boesch and F. Harary. "Line removal algorithms for graphs and their degree lists", IEEE Trans. Circuits and Systems, CAS-23(12), pp. 778-782 (1976). """ s = list(sequence) if not nx.utils.is_list_of_ints(s): return False return sum(s)%2 == 0 and min(s) >= 0 def is_digraphical(in_sequence, out_sequence): r"""Returns True if some directed graph can realize the in- and out-degree sequences. Parameters ---------- in_sequence : list or iterable container A sequence of integer node in-degrees out_sequence : list or iterable container A sequence of integer node out-degrees Returns ------- valid : bool True if in and out-sequences are digraphic False if not. Notes ----- This algorithm is from Kleitman and Wang [1]_. The worst case runtime is O(s * log n) where s and n are the sum and length of the sequences respectively. References ---------- .. [1] D.J. Kleitman and D.L. Wang Algorithms for Constructing Graphs and Digraphs with Given Valences and Factors, Discrete Mathematics, 6(1), pp. 
79-88 (1973) """ in_deg_sequence = list(in_sequence) out_deg_sequence = list(out_sequence) if not nx.utils.is_list_of_ints(in_deg_sequence): return False if not nx.utils.is_list_of_ints(out_deg_sequence): return False # Process the sequences and form two heaps to store degree pairs with # either zero or non-zero out degrees sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence) maxn = max(nin, nout) maxin = 0 if maxn==0: return True stubheap, zeroheap = [ ], [ ] for n in range(maxn): in_deg, out_deg = 0, 0 if n 0: stubheap.append((-1*out_deg, -1*in_deg)) elif out_deg > 0: zeroheap.append(-1*out_deg) if sumin != sumout: return False heapq.heapify(stubheap) heapq.heapify(zeroheap) modstubs = [(0,0)]*(maxin+1) # Successively reduce degree sequence by removing the maximum out degree while stubheap: # Take the first value in the sequence with non-zero in degree (freeout, freein) = heapq.heappop( stubheap ) freein *= -1 if freein > len(stubheap)+len(zeroheap): return False # Attach out stubs to the nodes with the most in stubs mslen = 0 for i in range(freein): if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0]): stubout = heapq.heappop(zeroheap) stubin = 0 else: (stubout, stubin) = heapq.heappop(stubheap) if stubout == 0: return False # Check if target is now totally connected if stubout+1<0 or stubin<0: modstubs[mslen] = (stubout+1, stubin) mslen += 1 # Add back the nodes to the heap that still have available stubs for i in range(mslen): stub = modstubs[i] if stub[1] < 0: heapq.heappush(stubheap, stub) else: heapq.heappush(zeroheap, stub[0]) if freeout<0: heapq.heappush(zeroheap, freeout) return True networkx-1.8.1/networkx/algorithms/richclub.py0000664000175000017500000000667412177456333021511 0ustar aricaric00000000000000# -*- coding: utf-8 -*- import networkx as nx __author__ = """\n""".join(['Ben Edwards', 'Aric Hagberg ']) __all__ = ['rich_club_coefficient'] def rich_club_coefficient(G, normalized=True, Q=100): """Return the rich-club coefficient of the graph G. The rich-club coefficient is the ratio, for every degree k, of the number of actual to the number of potential edges for nodes with degree greater than k: .. math:: \\phi(k) = \\frac{2 Ek}{Nk(Nk-1)} where Nk is the number of nodes with degree larger than k, and Ek be the number of edges among those nodes. Parameters ---------- G : NetworkX graph normalized : bool (optional) Normalize using randomized network (see [1]_) Q : float (optional, default=100) If normalized=True build a random network by performing Q*M double-edge swaps, where M is the number of edges in G, to use as a null-model for normalization. Returns ------- rc : dictionary A dictionary, keyed by degree, with rich club coefficient values. Examples -------- >>> G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)]) >>> rc = nx.rich_club_coefficient(G,normalized=False) >>> rc[0] # doctest: +SKIP 0.4 Notes ------ The rich club definition and algorithm are found in [1]_. This algorithm ignores any edge weights and is not defined for directed graphs or graphs with parallel edges or self loops. Estimates for appropriate values of Q are found in [2]_. References ---------- .. [1] Julian J. McAuley, Luciano da Fontoura Costa, and Tibério S. Caetano, "The rich-club phenomenon across complex network hierarchies", Applied Physics Letters Vol 91 Issue 8, August 2007. http://arxiv.org/abs/physics/0701290 .. [2] R. Milo, N. Kashtan, S. Itzkovitz, M. E. J. Newman, U. Alon, "Uniform generation of random graphs with arbitrary degree sequences", 2006. 
http://arxiv.org/abs/cond-mat/0312028 """ if G.is_multigraph() or G.is_directed(): raise Exception('rich_club_coefficient is not implemented for ', 'directed or multiedge graphs.') if len(G.selfloop_edges()) > 0: raise Exception('rich_club_coefficient is not implemented for ', 'graphs with self loops.') rc=_compute_rc(G) if normalized: # make R a copy of G, randomize with Q*|E| double edge swaps # and use rich_club coefficient of R to normalize R = G.copy() E = R.number_of_edges() nx.double_edge_swap(R,Q*E,max_tries=Q*E*10) rcran=_compute_rc(R) for d in rc: # if rcran[d] > 0: rc[d]/=rcran[d] return rc def _compute_rc(G): # compute rich club coefficient for all k degrees in G deghist = nx.degree_histogram(G) total = sum(deghist) # number of nodes with degree > k (omit last entry which is zero) nks = [total-cs for cs in nx.utils.cumulative_sum(deghist) if total-cs > 1] deg=G.degree() edge_degrees=sorted(sorted((deg[u],deg[v])) for u,v in G.edges_iter()) ek=G.number_of_edges() k1,k2=edge_degrees.pop(0) rc={} for d,nk in zip(range(len(nks)),nks): while k1 <= d: if len(edge_degrees)==0: break k1,k2=edge_degrees.pop(0) ek-=1 rc[d] = 2.0*ek/(nk*(nk-1)) return rc networkx-1.8.1/networkx/algorithms/hierarchy.py0000664000175000017500000000336112177456333021662 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Flow Hierarchy. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __authors__ = "\n".join(['Ben Edwards (bedwards@cs.unm.edu)']) __all__ = ['flow_hierarchy'] def flow_hierarchy(G, weight=None): """Returns the flow hierarchy of a directed network. Flow hierarchy is defined as the fraction of edges not participating in cycles in a directed graph [1]_. Parameters ---------- G : DiGraph or MultiDiGraph A directed graph weight : key,optional (default=None) Attribute to use for node weights. If None the weight defaults to 1. Returns ------- h : float Flow heirarchy value Notes ----- The algorithm described in [1]_ computes the flow hierarchy through exponentiation of the adjacency matrix. This function implements an alternative approach that finds strongly connected components. An edge is in a cycle if and only if it is in a strongly connected component, which can be found in `O(m)` time using Tarjan's algorithm. References ---------- .. [1] Luo, J.; Magee, C.L. (2011), Detecting evolving patterns of self-organizing networks by flow hierarchy measurement, Complexity, Volume 16 Issue 6 53-61. DOI: 10.1002/cplx.20368 http://web.mit.edu/~cmagee/www/documents/28-DetectingEvolvingPatterns_FlowHierarchy.pdf """ if not G.is_directed(): raise nx.NetworkXError("G must be a digraph in flow_heirarchy") scc = nx.strongly_connected_components(G) return 1.-sum(G.subgraph(c).size(weight) for c in scc)/float(G.size(weight)) networkx-1.8.1/networkx/algorithms/cycles.py0000664000175000017500000002575412177456333021200 0ustar aricaric00000000000000""" ======================== Cycle finding algorithms ======================== """ # Copyright (C) 2010-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from networkx.utils import * from collections import defaultdict __all__ = ['cycle_basis','simple_cycles','recursive_simple_cycles'] __author__ = "\n".join(['Jon Olav Vik ', 'Dan Schult ', 'Aric Hagberg ']) @not_implemented_for('directed') @not_implemented_for('multigraph') def cycle_basis(G,root=None): """ Returns a list of cycles which form a basis for cycles of G. 
A basis for cycles of a network is a minimal collection of cycles such that any cycle in the network can be written as a sum of cycles in the basis. Here summation of cycles is defined as "exclusive or" of the edges. Cycle bases are useful, e.g. when deriving equations for electric circuits using Kirchhoff's Laws. Parameters ---------- G : NetworkX Graph root : node, optional Specify starting node for basis. Returns ------- A list of cycle lists. Each cycle list is a list of nodes which forms a cycle (loop) in G. Examples -------- >>> G=nx.Graph() >>> G.add_cycle([0,1,2,3]) >>> G.add_cycle([0,3,4,5]) >>> print(nx.cycle_basis(G,0)) [[3, 4, 5, 0], [1, 2, 3, 0]] Notes ----- This is adapted from algorithm CACM 491 [1]_. References ---------- .. [1] Paton, K. An algorithm for finding a fundamental set of cycles of a graph. Comm. ACM 12, 9 (Sept 1969), 514-518. See Also -------- simple_cycles """ gnodes=set(G.nodes()) cycles=[] while gnodes: # loop over connected components if root is None: root=gnodes.pop() stack=[root] pred={root:root} used={root:set()} while stack: # walk the spanning tree finding cycles z=stack.pop() # use last-in so cycles easier to find zused=used[z] for nbr in G[z]: if nbr not in used: # new node pred[nbr]=z stack.append(nbr) used[nbr]=set([z]) elif nbr == z: # self loops cycles.append([z]) elif nbr not in zused:# found a cycle pn=used[nbr] cycle=[nbr,z] p=pred[z] while p not in pn: cycle.append(p) p=pred[p] cycle.append(p) cycles.append(cycle) used[nbr].add(z) gnodes-=set(pred) root=None return cycles @not_implemented_for('undirected') def simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. An simple cycle, or elementary circuit, is a closed path where no node appears twice, except that the first and last node are the same. Two elementary circuits are distinct if they are not cyclic permutations of each other. This is a nonrecursive, iterator/generator version of Johnson's algorithm [1]_. There may be better algorithms for some cases [2]_ [3]_. Parameters ---------- G : NetworkX DiGraph A directed graph Returns ------- cycle_generator: generator A generator that produces elementary cycles of the graph. Each cycle is a list of nodes with the first and last nodes being the same. Examples -------- >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]) >>> list(nx.simple_cycles(G)) [[2], [2, 1], [2, 0], [2, 0, 1], [0]] Notes ----- The implementation follows pp. 79-80 in [1]_. The time complexity is O((n+e)(c+1)) for n nodes, e edges and c elementary circuits. To filter the cycles so that they don't include certain nodes or edges, copy your graph and eliminate those nodes or edges before calling. >>> copyG = G.copy() >>> copyG.remove_nodes_from([1]) >>> copyG.remove_edges_from([(0,1)]) >>> list(nx.simple_cycles(copyG)) [[2], [2, 0], [0]] References ---------- .. [1] Finding all the elementary circuits of a directed graph. D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975. http://dx.doi.org/10.1137/0204007 .. [2] Enumerating the cycles of a digraph: a new preprocessing strategy. G. Loizou and P. Thanish, Information Sciences, v. 27, 163-182, 1982. .. [3] A search strategy for the elementary cycles of a directed graph. J.L. Szwarcfiter and P.E. Lauer, BIT NUMERICAL MATHEMATICS, v. 16, no. 2, 192-204, 1976. 
See Also -------- cycle_basis """ def _unblock(thisnode,blocked,B): stack=set([thisnode]) while stack: node=stack.pop() if node in blocked: blocked.remove(node) stack.update(B[node]) B[node].clear() # Johnson's algorithm requires some ordering of the nodes. # We assign the arbitrary ordering given by the strongly connected comps # There is no need to track the ordering as each node removed as processed. subG=G.copy() # save the actual graph so we can mutate it here sccs = nx.strongly_connected_components(subG) while sccs: scc=sccs.pop() # order of scc determines ordering of nodes startnode = scc.pop() # Processing node runs "circuit" routine from recursive version path=[startnode] blocked = set() # vertex: blocked from search? closed = set() # nodes involved in a cycle blocked.add(startnode) B=defaultdict(set) # graph portions that yield no elementary circuit stack=[ (startnode,list(subG[startnode])) ] # subG gives component nbrs while stack: thisnode,nbrs = stack[-1] if nbrs: nextnode = nbrs.pop() # print thisnode,nbrs,":",nextnode,blocked,B,path,stack,startnode # f=raw_input("pause") if nextnode == startnode: yield path[:] closed.update(path) # print "Found a cycle",path,closed elif nextnode not in blocked: path.append(nextnode) stack.append( (nextnode,list(subG[nextnode])) ) blocked.add(nextnode) continue # done with nextnode... look for more neighbors if not nbrs: # no more nbrs if thisnode in closed: _unblock(thisnode,blocked,B) else: for nbr in G[thisnode]: if thisnode not in B[nbr]: B[nbr].add(thisnode) stack.pop() # assert path[-1]==thisnode path.pop() # done processing this node subG.remove_node(startnode) H=subG.subgraph(scc) # make smaller to avoid work in SCC routine sccs.extend(nx.strongly_connected_components(H)) @not_implemented_for('undirected') def recursive_simple_cycles(G): """Find simple cycles (elementary circuits) of a directed graph. A simple cycle, or elementary circuit, is a closed path where no node appears twice, except that the first and last node are the same. Two elementary circuits are distinct if they are not cyclic permutations of each other. This version uses a recursive algorithm to build a list of cycles. You should probably use the iterator version caled simple_cycles(). Warning: This recursive version uses lots of RAM! Parameters ---------- G : NetworkX DiGraph A directed graph Returns ------- A list of circuits, where each circuit is a list of nodes, with the first and last node being the same. Example: >>> G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]) >>> nx.recursive_simple_cycles(G) [[0], [0, 1, 2], [0, 2], [1, 2], [2]] See Also -------- cycle_basis (for undirected graphs) Notes ----- The implementation follows pp. 79-80 in [1]_. The time complexity is O((n+e)(c+1)) for n nodes, e edges and c elementary circuits. References ---------- .. [1] Finding all the elementary circuits of a directed graph. D. B. Johnson, SIAM Journal on Computing 4, no. 1, 77-84, 1975. 
http://dx.doi.org/10.1137/0204007 See Also -------- simple_cycles, cycle_basis """ # Jon Olav Vik, 2010-08-09 def _unblock(thisnode): """Recursively unblock and remove nodes from B[thisnode].""" if blocked[thisnode]: blocked[thisnode] = False while B[thisnode]: _unblock(B[thisnode].pop()) def circuit(thisnode, startnode, component): closed = False # set to True if elementary path is closed path.append(thisnode) blocked[thisnode] = True for nextnode in component[thisnode]: # direct successors of thisnode if nextnode == startnode: result.append(path[:]) closed = True elif not blocked[nextnode]: if circuit(nextnode, startnode, component): closed = True if closed: _unblock(thisnode) else: for nextnode in component[thisnode]: if thisnode not in B[nextnode]: # TODO: use set for speedup? B[nextnode].append(thisnode) path.pop() # remove thisnode from path return closed path = [] # stack of nodes in current path blocked = defaultdict(bool) # vertex: blocked from search? B = defaultdict(list) # graph portions that yield no elementary circuit result = [] # list to accumulate the circuits found # Johnson's algorithm requires some ordering of the nodes. # They might not be sortable so we assign an arbitrary ordering. ordering=dict(zip(G,range(len(G)))) for s in ordering: # Build the subgraph induced by s and following nodes in the ordering subgraph = G.subgraph(node for node in G if ordering[node] >= ordering[s]) # Find the strongly connected component in the subgraph # that contains the least node according to the ordering strongcomp = nx.strongly_connected_components(subgraph) mincomp=min(strongcomp, key=lambda nodes: min(ordering[n] for n in nodes)) component = G.subgraph(mincomp) if component: # smallest node in the component according to the ordering startnode = min(component,key=ordering.__getitem__) for node in component: blocked[node] = False B[node][:] = [] dummy=circuit(startnode, startnode, component) return result networkx-1.8.1/networkx/algorithms/traversal/0000775000175000017500000000000012177457361021334 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/traversal/depth_first_search.py0000664000175000017500000000744312177456333025554 0ustar aricaric00000000000000""" ================== Depth-first search ================== Basic algorithms for depth-first searching. Based on http://www.ics.uci.edu/~eppstein/PADS/DFS.py by D. Eppstein, July 2004. """ __author__ = """\n""".join(['Aric Hagberg ']) __all__ = ['dfs_edges', 'dfs_tree', 'dfs_predecessors', 'dfs_successors', 'dfs_preorder_nodes','dfs_postorder_nodes', 'dfs_labeled_edges'] import networkx as nx from collections import defaultdict def dfs_edges(G,source=None): """Produce edges in a depth-first-search starting at source.""" # Based on http://www.ics.uci.edu/~eppstein/PADS/DFS.py # by D. Eppstein, July 2004. 
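    # Iterative DFS: an explicit stack of (node, iterator over its neighbors)
    # pairs replaces recursion; a tree edge is yielded the first time its
    # far endpoint is discovered.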
if source is None: # produce edges for all components nodes=G else: # produce edges for components with source nodes=[source] visited=set() for start in nodes: if start in visited: continue visited.add(start) stack = [(start,iter(G[start]))] while stack: parent,children = stack[-1] try: child = next(children) if child not in visited: yield parent,child visited.add(child) stack.append((child,iter(G[child]))) except StopIteration: stack.pop() def dfs_tree(G, source): """Return directed tree of depth-first-search from source.""" T = nx.DiGraph() if source is None: T.add_nodes_from(G) else: T.add_node(source) T.add_edges_from(dfs_edges(G,source)) return T def dfs_predecessors(G, source=None): """Return dictionary of predecessors in depth-first-search from source.""" return dict((t,s) for s,t in dfs_edges(G,source=source)) def dfs_successors(G, source=None): """Return dictionary of successors in depth-first-search from source.""" d=defaultdict(list) for s,t in dfs_edges(G,source=source): d[s].append(t) return dict(d) def dfs_postorder_nodes(G,source=None): """Produce nodes in a depth-first-search post-ordering starting from source. """ post=(v for u,v,d in nx.dfs_labeled_edges(G,source=source) if d['dir']=='reverse') # chain source to end of pre-ordering # return chain(post,[source]) return post def dfs_preorder_nodes(G,source=None): """Produce nodes in a depth-first-search pre-ordering starting at source.""" pre=(v for u,v,d in nx.dfs_labeled_edges(G,source=source) if d['dir']=='forward') # chain source to beginning of pre-ordering # return chain([source],pre) return pre def dfs_labeled_edges(G,source=None): """Produce edges in a depth-first-search starting at source and labeled by direction type (forward, reverse, nontree). """ # Based on http://www.ics.uci.edu/~eppstein/PADS/DFS.py # by D. Eppstein, July 2004. if source is None: # produce edges for all components nodes=G else: # produce edges for components with source nodes=[source] visited=set() for start in nodes: if start in visited: continue yield start,start,{'dir':'forward'} visited.add(start) stack = [(start,iter(G[start]))] while stack: parent,children = stack[-1] try: child = next(children) if child in visited: yield parent,child,{'dir':'nontree'} else: yield parent,child,{'dir':'forward'} visited.add(child) stack.append((child,iter(G[child]))) except StopIteration: stack.pop() if stack: yield stack[-1][0],parent,{'dir':'reverse'} yield start,start,{'dir':'reverse'} networkx-1.8.1/networkx/algorithms/traversal/breadth_first_search.py0000664000175000017500000000321412177456333026051 0ustar aricaric00000000000000""" ==================== Breadth-first search ==================== Basic algorithms for breadth-first searching. """ __author__ = """\n""".join(['Aric Hagberg ']) __all__ = ['bfs_edges', 'bfs_tree', 'bfs_predecessors', 'bfs_successors'] import networkx as nx from collections import defaultdict, deque def bfs_edges(G, source, reverse=False): """Produce edges in a breadth-first-search starting at source.""" # Based on http://www.ics.uci.edu/~eppstein/PADS/BFS.py # by D. Eppstein, July 2004. 
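    # Iterative BFS: a deque of (node, iterator over its neighbors) pairs acts
    # as the FIFO queue; with reverse=True on a DiGraph the search follows
    # incoming edges via G.predecessors_iter.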
if reverse and isinstance(G, nx.DiGraph): neighbors = G.predecessors_iter else: neighbors = G.neighbors_iter visited=set([source]) queue = deque([(source, neighbors(source))]) while queue: parent, children = queue[0] try: child = next(children) if child not in visited: yield parent, child visited.add(child) queue.append((child, neighbors(child))) except StopIteration: queue.popleft() def bfs_tree(G, source, reverse=False): """Return directed tree of breadth-first-search from source.""" T = nx.DiGraph() T.add_node(source) T.add_edges_from(bfs_edges(G,source,reverse=reverse)) return T def bfs_predecessors(G, source): """Return dictionary of predecessors in breadth-first-search from source.""" return dict((t,s) for s,t in bfs_edges(G,source)) def bfs_successors(G, source): """Return dictionary of successors in breadth-first-search from source.""" d=defaultdict(list) for s,t in bfs_edges(G,source): d[s].append(t) return dict(d) networkx-1.8.1/networkx/algorithms/traversal/__init__.py0000664000175000017500000000036212177456333023444 0ustar aricaric00000000000000import networkx.algorithms.traversal.depth_first_search from networkx.algorithms.traversal.depth_first_search import * import networkx.algorithms.traversal.breadth_first_search from networkx.algorithms.traversal.breadth_first_search import * networkx-1.8.1/networkx/algorithms/traversal/tests/0000775000175000017500000000000012177457361022476 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/traversal/tests/test_bfs.py0000664000175000017500000000206312177456333024660 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestBFS: def setUp(self): # simple graph G=nx.Graph() G.add_edges_from([(0,1),(1,2),(1,3),(2,4),(3,4)]) self.G=G def test_successor(self): assert_equal(nx.bfs_successors(self.G,source=0), {0: [1], 1: [2,3], 2:[4]}) def test_predecessor(self): assert_equal(nx.bfs_predecessors(self.G,source=0), {1: 0, 2: 1, 3: 1, 4: 2}) def test_bfs_tree(self): T=nx.bfs_tree(self.G,source=0) assert_equal(sorted(T.nodes()),sorted(self.G.nodes())) assert_equal(sorted(T.edges()),[(0, 1), (1, 2), (1, 3), (2, 4)]) def test_bfs_edges(self): edges=nx.bfs_edges(self.G,source=0) assert_equal(list(edges),[(0, 1), (1, 2), (1, 3), (2, 4)]) def test_bfs_tree_isolates(self): G = nx.Graph() G.add_node(1) G.add_node(2) T=nx.bfs_tree(G,source=1) assert_equal(sorted(T.nodes()),[1]) assert_equal(sorted(T.edges()),[]) networkx-1.8.1/networkx/algorithms/traversal/tests/test_dfs.py0000664000175000017500000000460312177456333024664 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestDFS: def setUp(self): # simple graph G=nx.Graph() G.add_edges_from([(0,1),(1,2),(1,3),(2,4),(3,4)]) self.G=G # simple graph, disconnected D=nx.Graph() D.add_edges_from([(0,1),(2,3)]) self.D=D def test_preorder_nodes(self): assert_equal(list(nx.dfs_preorder_nodes(self.G,source=0)), [0, 1, 2, 4, 3]) assert_equal(list(nx.dfs_preorder_nodes(self.D)),[0, 1, 2, 3]) def test_postorder_nodes(self): assert_equal(list(nx.dfs_postorder_nodes(self.G,source=0)), [3, 4, 2, 1, 0]) assert_equal(list(nx.dfs_postorder_nodes(self.D)),[1, 0, 3, 2]) def test_successor(self): assert_equal(nx.dfs_successors(self.G,source=0), {0: [1], 1: [2], 2: [4], 4: [3]}) assert_equal(nx.dfs_successors(self.D), {0: [1], 2: [3]}) def test_predecessor(self): assert_equal(nx.dfs_predecessors(self.G,source=0), {1: 0, 2: 1, 3: 4, 4: 2}) assert_equal(nx.dfs_predecessors(self.D), {1: 0, 3: 2}) def 
test_dfs_tree(self): T=nx.dfs_tree(self.G,source=0) assert_equal(sorted(T.nodes()),sorted(self.G.nodes())) assert_equal(sorted(T.edges()),[(0, 1), (1, 2), (2, 4), (4, 3)]) def test_dfs_edges(self): edges=nx.dfs_edges(self.G,source=0) assert_equal(list(edges),[(0, 1), (1, 2), (2, 4), (4, 3)]) edges=nx.dfs_edges(self.D) assert_equal(list(edges),[(0, 1), (2, 3)]) def test_dfs_labeled_edges(self): edges=list(nx.dfs_labeled_edges(self.G,source=0)) forward=[(u,v) for (u,v,d) in edges if d['dir']=='forward'] assert_equal(forward,[(0,0), (0, 1), (1, 2), (2, 4), (4, 3)]) def test_dfs_labeled_disconnected_edges(self): edges=list(nx.dfs_labeled_edges(self.D)) forward=[(u,v) for (u,v,d) in edges if d['dir']=='forward'] assert_equal(forward,[(0, 0), (0, 1), (2, 2), (2, 3)]) def test_dfs_tree_isolates(self): G = nx.Graph() G.add_node(1) G.add_node(2) T=nx.dfs_tree(G,source=1) assert_equal(sorted(T.nodes()),[1]) assert_equal(sorted(T.edges()),[]) T=nx.dfs_tree(G,source=None) assert_equal(sorted(T.nodes()),[1, 2]) assert_equal(sorted(T.edges()),[]) networkx-1.8.1/networkx/algorithms/connectivity/0000775000175000017500000000000012177457361022047 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/connectivity/connectivity.py0000664000175000017500000004763512177456333025154 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Flow based connectivity algorithms """ import itertools import networkx as nx __author__ = '\n'.join(['Jordi Torrents ']) __all__ = [ 'average_node_connectivity', 'local_node_connectivity', 'node_connectivity', 'local_edge_connectivity', 'edge_connectivity', 'all_pairs_node_connectivity_matrix', 'dominating_set', ] def average_node_connectivity(G): r"""Returns the average connectivity of a graph G. The average connectivity `\bar{\kappa}` of a graph G is the average of local node connectivity over all pairs of nodes of G [1]_ . .. math:: \bar{\kappa}(G) = \frac{\sum_{u,v} \kappa_{G}(u,v)}{{n \choose 2}} Parameters ---------- G : NetworkX graph Undirected graph Returns ------- K : float Average node connectivity See also -------- local_node_connectivity node_connectivity local_edge_connectivity edge_connectivity max_flow ford_fulkerson References ---------- .. [1] Beineke, L., O. Oellermann, and R. Pippert (2002). The average connectivity of a graph. Discrete mathematics 252(1-3), 31-45. http://www.sciencedirect.com/science/article/pii/S0012365X01001807 """ if G.is_directed(): iter_func = itertools.permutations else: iter_func = itertools.combinations H, mapping = _aux_digraph_node_connectivity(G) num = 0. den = 0. for u,v in iter_func(G, 2): den += 1 num += local_node_connectivity(G, u, v, aux_digraph=H, mapping=mapping) if den == 0: # Null Graph return 0 return num/den def _aux_digraph_node_connectivity(G): r""" Creates a directed graph D from an undirected graph G to compute flow based node connectivity. For an undirected graph G having `n` nodes and `m` edges we derive a directed graph D with 2n nodes and 2m+n arcs by replacing each original node `v` with two nodes `vA`,`vB` linked by an (internal) arc in D. Then for each edge (u,v) in G we add two arcs (uB,vA) and (vB,uA) in D. Finally we set the attribute capacity = 1 for each arc in D [1]. For a directed graph having `n` nodes and `m` arcs we derive a directed graph D with 2n nodes and m+n arcs by replacing each original node `v` with two nodes `vA`,`vB` linked by an (internal) arc `(vA,vB)` in D. Then for each arc (u,v) in G we add one arc (uB,vA) in D. Finally we set the attribute capacity = 1 for each arc in D. 
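    As a rough illustration (not an exhaustive check), the undirected path
    graph on three nodes and two edges yields an auxiliary digraph with
    2*3 = 6 nodes and 2*2 + 3 = 7 unit-capacity arcs::

        >>> D, mapping = _aux_digraph_node_connectivity(nx.path_graph(3))
        >>> D.number_of_nodes(), D.number_of_edges()
        (6, 7)
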
References ---------- .. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and Erlebach, 'Network Analysis: Methodological Foundations', Lecture Notes in Computer Science, Volume 3418, Springer-Verlag, 2005. http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf """ directed = G.is_directed() mapping = {} D = nx.DiGraph() for i,node in enumerate(G): mapping[node] = i D.add_node('%dA' % i,id=node) D.add_node('%dB' % i,id=node) D.add_edge('%dA' % i, '%dB' % i, capacity=1) edges = [] for (source, target) in G.edges(): edges.append(('%sB' % mapping[source], '%sA' % mapping[target])) if not directed: edges.append(('%sB' % mapping[target], '%sA' % mapping[source])) D.add_edges_from(edges, capacity=1) return D, mapping def local_node_connectivity(G, s, t, aux_digraph=None, mapping=None): r"""Computes local node connectivity for nodes s and t. Local node connectivity for two non adjacent nodes s and t is the minimum number of nodes that must be removed (along with their incident edges) to disconnect them. This is a flow based implementation of node connectivity. We compute the maximum flow on an auxiliary digraph build from the original input graph (see below for details). This is equal to the local node connectivity because the value of a maximum s-t-flow is equal to the capacity of a minimum s-t-cut (Ford and Fulkerson theorem) [1]_ . Parameters ---------- G : NetworkX graph Undirected graph s : node Source node t : node Target node aux_digraph : NetworkX DiGraph (default=None) Auxiliary digraph to compute flow based node connectivity. If None the auxiliary digraph is build. mapping : dict (default=None) Dictionary with a mapping of node names in G and in the auxiliary digraph. Returns ------- K : integer local node connectivity for nodes s and t Examples -------- >>> # Platonic icosahedral graph has node connectivity 5 >>> # for each non adjacent node pair >>> G = nx.icosahedral_graph() >>> nx.local_node_connectivity(G,0,6) 5 Notes ----- This is a flow based implementation of node connectivity. We compute the maximum flow using the Ford and Fulkerson algorithm on an auxiliary digraph build from the original input graph: For an undirected graph G having `n` nodes and `m` edges we derive a directed graph D with 2n nodes and 2m+n arcs by replacing each original node `v` with two nodes `v_A`, `v_B` linked by an (internal) arc in `D`. Then for each edge (`u`, `v`) in G we add two arcs (`u_B`, `v_A`) and (`v_B`, `u_A`) in `D`. Finally we set the attribute capacity = 1 for each arc in `D` [1]_ . For a directed graph G having `n` nodes and `m` arcs we derive a directed graph `D` with `2n` nodes and `m+n` arcs by replacing each original node `v` with two nodes `v_A`, `v_B` linked by an (internal) arc `(v_A, v_B)` in D. Then for each arc `(u,v)` in G we add one arc `(u_B,v_A)` in `D`. Finally we set the attribute capacity = 1 for each arc in `D`. This is equal to the local node connectivity because the value of a maximum s-t-flow is equal to the capacity of a minimum s-t-cut (Ford and Fulkerson theorem). See also -------- node_connectivity all_pairs_node_connectivity_matrix local_edge_connectivity edge_connectivity max_flow ford_fulkerson References ---------- .. [1] Kammer, Frank and Hanjo Taubig. Graph Connectivity. in Brandes and Erlebach, 'Network Analysis: Methodological Foundations', Lecture Notes in Computer Science, Volume 3418, Springer-Verlag, 2005. 
http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf """ if aux_digraph is None or mapping is None: H, mapping = _aux_digraph_node_connectivity(G) else: H = aux_digraph return nx.max_flow(H,'%sB' % mapping[s], '%sA' % mapping[t]) def node_connectivity(G, s=None, t=None): r"""Returns node connectivity for a graph or digraph G. Node connectivity is equal to the minimum number of nodes that must be removed to disconnect G or render it trivial. If source and target nodes are provided, this function returns the local node connectivity: the minimum number of nodes that must be removed to break all paths from source to target in G. This is a flow based implementation. The algorithm is based in solving a number of max-flow problems (ie local st-node connectivity, see local_node_connectivity) to determine the capacity of the minimum cut on an auxiliary directed network that corresponds to the minimum node cut of G. It handles both directed and undirected graphs. Parameters ---------- G : NetworkX graph Undirected graph s : node Source node. Optional (default=None) t : node Target node. Optional (default=None) Returns ------- K : integer Node connectivity of G, or local node connectivity if source and target were provided Examples -------- >>> # Platonic icosahedral graph is 5-node-connected >>> G = nx.icosahedral_graph() >>> nx.node_connectivity(G) 5 >>> nx.node_connectivity(G, 3, 7) 5 Notes ----- This is a flow based implementation of node connectivity. The algorithm works by solving `O((n-\delta-1+\delta(\delta-1)/2)` max-flow problems on an auxiliary digraph. Where `\delta` is the minimum degree of G. For details about the auxiliary digraph and the computation of local node connectivity see local_node_connectivity. This implementation is based on algorithm 11 in [1]_. We use the Ford and Fulkerson algorithm to compute max flow (see ford_fulkerson). See also -------- local_node_connectivity all_pairs_node_connectivity_matrix local_edge_connectivity edge_connectivity max_flow ford_fulkerson References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. 
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ # Local node connectivity if s is not None and t is not None: if s not in G: raise nx.NetworkXError('node %s not in graph' % s) if t not in G: raise nx.NetworkXError('node %s not in graph' % t) return local_node_connectivity(G, s, t) # Global node connectivity if G.is_directed(): if not nx.is_weakly_connected(G): return 0 iter_func = itertools.permutations # I think that it is necessary to consider both predecessors # and successors for directed graphs def neighbors(v): return itertools.chain.from_iterable([G.predecessors_iter(v), G.successors_iter(v)]) else: if not nx.is_connected(G): return 0 iter_func = itertools.combinations neighbors = G.neighbors_iter # Initial guess \kappa = n - 1 K = G.order()-1 deg = G.degree() min_deg = min(deg.values()) v = next(n for n,d in deg.items() if d==min_deg) # Reuse the auxiliary digraph H, mapping = _aux_digraph_node_connectivity(G) # compute local node connectivity with all non-neighbors nodes for w in set(G) - set(neighbors(v)) - set([v]): K = min(K, local_node_connectivity(G, v, w, aux_digraph=H, mapping=mapping)) # Same for non adjacent pairs of neighbors of v for x,y in iter_func(neighbors(v), 2): if y in G[x]: continue K = min(K, local_node_connectivity(G, x, y, aux_digraph=H, mapping=mapping)) return K def all_pairs_node_connectivity_matrix(G): """Return a numpy 2d ndarray with node connectivity between all pairs of nodes. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- K : 2d numpy ndarray node connectivity between all pairs of nodes. See also -------- local_node_connectivity node_connectivity local_edge_connectivity edge_connectivity max_flow ford_fulkerson """ try: import numpy except ImportError: raise ImportError(\ "all_pairs_node_connectivity_matrix() requires NumPy") n = G.order() M = numpy.zeros((n, n), dtype=int) # Create auxiliary Digraph D, mapping = _aux_digraph_node_connectivity(G) if G.is_directed(): for u, v in itertools.permutations(G, 2): K = local_node_connectivity(G, u, v, aux_digraph=D, mapping=mapping) M[mapping[u],mapping[v]] = K else: for u, v in itertools.combinations(G, 2): K = local_node_connectivity(G, u, v, aux_digraph=D, mapping=mapping) M[mapping[u],mapping[v]] = M[mapping[v],mapping[u]] = K return M def _aux_digraph_edge_connectivity(G): """Auxiliary digraph for computing flow based edge connectivity If the input graph is undirected, we replace each edge (u,v) with two reciprocal arcs (u,v) and (v,u) and then we set the attribute 'capacity' for each arc to 1. If the input graph is directed we simply add the 'capacity' attribute. Part of algorithm 1 in [1]_ . References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. (this is a chapter, look for the reference of the book). http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ if G.is_directed(): if nx.get_edge_attributes(G, 'capacity'): return G D = G.copy() capacity = dict((e,1) for e in D.edges()) nx.set_edge_attributes(D, 'capacity', capacity) return D else: D = G.to_directed() capacity = dict((e,1) for e in D.edges()) nx.set_edge_attributes(D, 'capacity', capacity) return D def local_edge_connectivity(G, u, v, aux_digraph=None): r"""Returns local edge connectivity for nodes s and t in G. Local edge connectivity for two nodes s and t is the minimum number of edges that must be removed to disconnect them. This is a flow based implementation of edge connectivity. 
We compute the maximum flow on an auxiliary digraph build from the original network (see below for details). This is equal to the local edge connectivity because the value of a maximum s-t-flow is equal to the capacity of a minimum s-t-cut (Ford and Fulkerson theorem) [1]_ . Parameters ---------- G : NetworkX graph Undirected or directed graph s : node Source node t : node Target node aux_digraph : NetworkX DiGraph (default=None) Auxiliary digraph to compute flow based edge connectivity. If None the auxiliary digraph is build. Returns ------- K : integer local edge connectivity for nodes s and t Examples -------- >>> # Platonic icosahedral graph has edge connectivity 5 >>> # for each non adjacent node pair >>> G = nx.icosahedral_graph() >>> nx.local_edge_connectivity(G,0,6) 5 Notes ----- This is a flow based implementation of edge connectivity. We compute the maximum flow using the Ford and Fulkerson algorithm on an auxiliary digraph build from the original graph: If the input graph is undirected, we replace each edge (u,v) with two reciprocal arcs `(u,v)` and `(v,u)` and then we set the attribute 'capacity' for each arc to 1. If the input graph is directed we simply add the 'capacity' attribute. This is an implementation of algorithm 1 in [1]_. The maximum flow in the auxiliary network is equal to the local edge connectivity because the value of a maximum s-t-flow is equal to the capacity of a minimum s-t-cut (Ford and Fulkerson theorem). See also -------- local_node_connectivity node_connectivity edge_connectivity max_flow ford_fulkerson References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ if aux_digraph is None: H = _aux_digraph_edge_connectivity(G) else: H = aux_digraph return nx.max_flow(H, u, v) def edge_connectivity(G, s=None, t=None): r"""Returns the edge connectivity of the graph or digraph G. The edge connectivity is equal to the minimum number of edges that must be removed to disconnect G or render it trivial. If source and target nodes are provided, this function returns the local edge connectivity: the minimum number of edges that must be removed to break all paths from source to target in G. This is a flow based implementation. The algorithm is based in solving a number of max-flow problems (ie local st-edge connectivity, see local_edge_connectivity) to determine the capacity of the minimum cut on an auxiliary directed network that corresponds to the minimum edge cut of G. It handles both directed and undirected graphs. Parameters ---------- G : NetworkX graph Undirected or directed graph s : node Source node. Optional (default=None) t : node Target node. Optional (default=None) Returns ------- K : integer Edge connectivity for G, or local edge connectivity if source and target were provided Examples -------- >>> # Platonic icosahedral graph is 5-edge-connected >>> G = nx.icosahedral_graph() >>> nx.edge_connectivity(G) 5 Notes ----- This is a flow based implementation of global edge connectivity. For undirected graphs the algorithm works by finding a 'small' dominating set of nodes of G (see algorithm 7 in [1]_ ) and computing local max flow (see local_edge_connectivity) between an arbitrary node in the dominating set and the rest of nodes in it. This is an implementation of algorithm 6 in [1]_ . For directed graphs, the algorithm does n calls to the max flow function. This is an implementation of algorithm 8 in [1]_ . 
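    For instance, a directed cycle is only 1-edge-connected, while the
    digraph obtained by replacing every edge of an undirected cycle with two
    reciprocal arcs is 2-edge-connected::

        >>> nx.edge_connectivity(nx.cycle_graph(5, create_using=nx.DiGraph()))
        1
        >>> nx.edge_connectivity(nx.cycle_graph(5).to_directed())
        2
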
We use the Ford and Fulkerson algorithm to compute max flow (see ford_fulkerson). See also -------- local_node_connectivity node_connectivity local_edge_connectivity max_flow ford_fulkerson References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ # Local edge connectivity if s is not None and t is not None: if s not in G: raise nx.NetworkXError('node %s not in graph' % s) if t not in G: raise nx.NetworkXError('node %s not in graph' % t) return local_edge_connectivity(G, s, t) # Global edge connectivity if G.is_directed(): # Algorithm 8 in [1] if not nx.is_weakly_connected(G): return 0 # initial value for lambda is min degree (\delta(G)) L = min(G.degree().values()) # reuse auxiliary digraph H = _aux_digraph_edge_connectivity(G) nodes = G.nodes() n = len(nodes) for i in range(n): try: L = min(L, local_edge_connectivity(G, nodes[i], nodes[i+1], aux_digraph=H)) except IndexError: # last node! L = min(L, local_edge_connectivity(G, nodes[i], nodes[0], aux_digraph=H)) return L else: # undirected # Algorithm 6 in [1] if not nx.is_connected(G): return 0 # initial value for lambda is min degree (\delta(G)) L = min(G.degree().values()) # reuse auxiliary digraph H = _aux_digraph_edge_connectivity(G) # A dominating set is \lambda-covering # We need a dominating set with at least two nodes for node in G: D = dominating_set(G, start_with=node) v = D.pop() if D: break else: # in complete graphs the dominating sets will always be of one node # thus we return min degree return L for w in D: L = min(L, local_edge_connectivity(G, v, w, aux_digraph=H)) return L def dominating_set(G, start_with=None): # Algorithm 7 in [1] all_nodes = set(G) if start_with is None: v = set(G).pop() # pick a node else: if start_with not in G: raise nx.NetworkXError('node %s not in G' % start_with) v = start_with D = set([v]) ND = set([nbr for nbr in G[v]]) other = all_nodes - ND - D while other: w = other.pop() D.add(w) ND.update([nbr for nbr in G[w] if nbr not in D]) other = all_nodes - ND - D return D def is_dominating_set(G, nbunch): # Proposed by Dan on the mailing list allnodes=set(G) testset=set(n for n in nbunch if n in G) nbrs=set() for n in testset: nbrs.update(G[n]) if nbrs - allnodes: # some nodes left--not dominating return False else: return True networkx-1.8.1/networkx/algorithms/connectivity/__init__.py0000664000175000017500000000024212177456333024154 0ustar aricaric00000000000000"""Flow based connectivity and cut algorithms """ from networkx.algorithms.connectivity.connectivity import * from networkx.algorithms.connectivity.cuts import * networkx-1.8.1/networkx/algorithms/connectivity/tests/0000775000175000017500000000000012177457361023211 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/connectivity/tests/test_connectivity.py0000664000175000017500000001207512177456333027343 0ustar aricaric00000000000000from nose.tools import assert_equal, assert_true, assert_false import networkx as nx # helper functions for tests def _generate_no_biconnected(max_attempts=50): attempts = 0 while True: G = nx.fast_gnp_random_graph(100,0.0575) if nx.is_connected(G) and not nx.is_biconnected(G): attempts = 0 yield G else: if attempts >= max_attempts: msg = "Tried %d times: no suitable Graph." 
raise Exception(msg % max_attempts) else: attempts += 1 def is_dominating_set(G, nbunch): # Proposed by Dan on the mailing list allnodes=set(G) testset=set(n for n in nbunch if n in G) nbrs=set() for n in testset: nbrs.update(G[n]) if nbrs - allnodes: # some nodes left--not dominating return False else: return True # Tests for node and edge connectivity def test_average_connectivity(): # figure 1 from: # Beineke, L., O. Oellermann, and R. Pippert (2002). The average # connectivity of a graph. Discrete mathematics 252(1-3), 31-45 # http://www.sciencedirect.com/science/article/pii/S0012365X01001807 G1 = nx.path_graph(3) G1.add_edges_from([(1,3),(1,4)]) assert_equal(nx.average_node_connectivity(G1),1) G2 = nx.path_graph(3) G2.add_edges_from([(1,3),(1,4),(0,3),(0,4),(3,4)]) assert_equal(nx.average_node_connectivity(G2),2.2) G3 = nx.Graph() assert_equal(nx.average_node_connectivity(G3),0) def test_articulation_points(): Ggen = _generate_no_biconnected() for i in range(5): G = next(Ggen) assert_equal(nx.node_connectivity(G), 1) def test_brandes_erlebach(): # Figure 1 chapter 7: Connectivity # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf G = nx.Graph() G.add_edges_from([(1,2),(1,3),(1,4),(1,5),(2,3),(2,6),(3,4), (3,6),(4,6),(4,7),(5,7),(6,8),(6,9),(7,8), (7,10),(8,11),(9,10),(9,11),(10,11)]) assert_equal(3,nx.local_edge_connectivity(G,1,11)) assert_equal(3,nx.edge_connectivity(G,1,11)) assert_equal(2,nx.local_node_connectivity(G,1,11)) assert_equal(2,nx.node_connectivity(G,1,11)) assert_equal(2,nx.edge_connectivity(G)) # node 5 has degree 2 assert_equal(2,nx.node_connectivity(G)) def test_white_harary_1(): # Figure 1b white and harary (2001) # # http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF # A graph with high adhesion (edge connectivity) and low cohesion # (vertex connectivity) G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) G.remove_node(7) for i in range(4,7): G.add_edge(0,i) G = nx.disjoint_union(G, nx.complete_graph(4)) G.remove_node(G.order()-1) for i in range(7,10): G.add_edge(0,i) assert_equal(1, nx.node_connectivity(G)) assert_equal(3, nx.edge_connectivity(G)) def test_white_harary_2(): # Figure 8 white and harary (2001) # # http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) G.add_edge(0,4) # kappa <= lambda <= delta assert_equal(3, min(nx.core_number(G).values())) assert_equal(1, nx.node_connectivity(G)) assert_equal(1, nx.edge_connectivity(G)) def test_complete_graphs(): for n in range(5, 25, 5): G = nx.complete_graph(n) assert_equal(n-1, nx.node_connectivity(G)) assert_equal(n-1, nx.node_connectivity(G.to_directed())) assert_equal(n-1, nx.edge_connectivity(G)) assert_equal(n-1, nx.edge_connectivity(G.to_directed())) def test_empty_graphs(): for k in range(5, 25, 5): G = nx.empty_graph(k) assert_equal(0, nx.node_connectivity(G)) assert_equal(0, nx.edge_connectivity(G)) def test_petersen(): G = nx.petersen_graph() assert_equal(3, nx.node_connectivity(G)) assert_equal(3, nx.edge_connectivity(G)) def test_tutte(): G = nx.tutte_graph() assert_equal(3, nx.node_connectivity(G)) assert_equal(3, nx.edge_connectivity(G)) def test_dodecahedral(): G = nx.dodecahedral_graph() assert_equal(3, nx.node_connectivity(G)) assert_equal(3, nx.edge_connectivity(G)) def test_octahedral(): G=nx.octahedral_graph() assert_equal(4, nx.node_connectivity(G)) assert_equal(4, nx.edge_connectivity(G)) def test_icosahedral(): G=nx.icosahedral_graph() assert_equal(5, nx.node_connectivity(G)) 
assert_equal(5, nx.edge_connectivity(G)) def test_directed_edge_connectivity(): G = nx.cycle_graph(10,create_using=nx.DiGraph()) # only one direction D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges assert_equal(1, nx.edge_connectivity(G)) assert_equal(1, nx.local_edge_connectivity(G,1,4)) assert_equal(1, nx.edge_connectivity(G,1,4)) assert_equal(2, nx.edge_connectivity(D)) assert_equal(2, nx.local_edge_connectivity(D,1,4)) assert_equal(2, nx.edge_connectivity(D,1,4)) def test_dominating_set(): for i in range(5): G = nx.gnp_random_graph(100,0.1) D = nx.dominating_set(G) assert_true(is_dominating_set(G,D)) networkx-1.8.1/networkx/algorithms/connectivity/tests/test_cuts.py0000664000175000017500000001225412177456333025602 0ustar aricaric00000000000000from nose.tools import assert_equal, assert_true, assert_false, assert_raises import networkx as nx # Tests for node and edge cutsets def _generate_no_biconnected(max_attempts=50): attempts = 0 while True: G = nx.fast_gnp_random_graph(100,0.0575) if nx.is_connected(G) and not nx.is_biconnected(G): attempts = 0 yield G else: if attempts >= max_attempts: msg = "Tried %d times: no suitable Graph."%attempts raise Exception(msg % max_attempts) else: attempts += 1 def test_articulation_points(): Ggen = _generate_no_biconnected() for i in range(5): G = next(Ggen) cut = nx.minimum_node_cut(G) assert_true(len(cut) == 1) assert_true(cut.pop() in set(nx.articulation_points(G))) def test_brandes_erlebach_book(): # Figure 1 chapter 7: Connectivity # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf G = nx.Graph() G.add_edges_from([(1,2),(1,3),(1,4),(1,5),(2,3),(2,6),(3,4), (3,6),(4,6),(4,7),(5,7),(6,8),(6,9),(7,8), (7,10),(8,11),(9,10),(9,11),(10,11)]) # edge cutsets assert_equal(3, len(nx.minimum_edge_cut(G,1,11))) edge_cut = nx.minimum_edge_cut(G) assert_equal(2, len(edge_cut)) # Node 5 has only two edges H = G.copy() H.remove_edges_from(edge_cut) assert_false(nx.is_connected(H)) # node cuts assert_equal(set([6,7]), nx.minimum_st_node_cut(G,1,11)) assert_equal(set([6,7]), nx.minimum_node_cut(G,1,11)) node_cut = nx.minimum_node_cut(G) assert_equal(2,len(node_cut)) H = G.copy() H.remove_nodes_from(node_cut) assert_false(nx.is_connected(H)) def test_white_harary_paper(): # Figure 1b white and harary (2001) # http://eclectic.ss.uci.edu/~drwhite/sm-w23.PDF # A graph with high adhesion (edge connectivity) and low cohesion # (node connectivity) G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4)) G.remove_node(7) for i in range(4,7): G.add_edge(0,i) G = nx.disjoint_union(G, nx.complete_graph(4)) G.remove_node(G.order()-1) for i in range(7,10): G.add_edge(0,i) # edge cuts edge_cut = nx.minimum_edge_cut(G) assert_equal(3, len(edge_cut)) H = G.copy() H.remove_edges_from(edge_cut) assert_false(nx.is_connected(H)) # node cuts node_cut = nx.minimum_node_cut(G) assert_equal(set([0]), node_cut) H = G.copy() H.remove_nodes_from(node_cut) assert_false(nx.is_connected(H)) def test_petersen_cutset(): G = nx.petersen_graph() # edge cuts edge_cut = nx.minimum_edge_cut(G) assert_equal(3, len(edge_cut)) H = G.copy() H.remove_edges_from(edge_cut) assert_false(nx.is_connected(H)) # node cuts node_cut = nx.minimum_node_cut(G) assert_equal(3,len(node_cut)) H = G.copy() H.remove_nodes_from(node_cut) assert_false(nx.is_connected(H)) def test_octahedral_cutset(): G=nx.octahedral_graph() # edge cuts edge_cut = nx.minimum_edge_cut(G) assert_equal(4, len(edge_cut)) H = G.copy() H.remove_edges_from(edge_cut) 
assert_false(nx.is_connected(H)) # node cuts node_cut = nx.minimum_node_cut(G) assert_equal(4,len(node_cut)) H = G.copy() H.remove_nodes_from(node_cut) assert_false(nx.is_connected(H)) def test_icosahedral_cutset(): G=nx.icosahedral_graph() # edge cuts edge_cut = nx.minimum_edge_cut(G) assert_equal(5, len(edge_cut)) H = G.copy() H.remove_edges_from(edge_cut) assert_false(nx.is_connected(H)) # node cuts node_cut = nx.minimum_node_cut(G) assert_equal(5,len(node_cut)) H = G.copy() H.remove_nodes_from(node_cut) assert_false(nx.is_connected(H)) def test_node_cutset_exception(): G=nx.Graph() G.add_edges_from([(1,2),(3,4)]) assert_raises(nx.NetworkXError, nx.minimum_node_cut,G) def test_node_cutset_random_graphs(): for i in range(5): G = nx.fast_gnp_random_graph(50,0.2) if not nx.is_connected(G): ccs = iter(nx.connected_components(G)) start = next(ccs)[0] G.add_edges_from( (start,c[0]) for c in ccs ) cutset = nx.minimum_node_cut(G) assert_equal(nx.node_connectivity(G), len(cutset)) G.remove_nodes_from(cutset) assert_false(nx.is_connected(G)) def test_edge_cutset_random_graphs(): for i in range(5): G = nx.fast_gnp_random_graph(50,0.2) if not nx.is_connected(G): ccs = iter(nx.connected_components(G)) start = next(ccs)[0] G.add_edges_from( (start,c[0]) for c in ccs ) cutset = nx.minimum_edge_cut(G) assert_equal(nx.edge_connectivity(G), len(cutset)) G.remove_edges_from(cutset) assert_false(nx.is_connected(G)) # Test empty graphs def test_empty_graphs(): G = nx.Graph() D = nx.DiGraph() assert_raises(nx.NetworkXPointlessConcept, nx.minimum_node_cut, G) assert_raises(nx.NetworkXPointlessConcept, nx.minimum_node_cut, D) assert_raises(nx.NetworkXPointlessConcept, nx.minimum_edge_cut, G) assert_raises(nx.NetworkXPointlessConcept, nx.minimum_edge_cut, D) networkx-1.8.1/networkx/algorithms/connectivity/cuts.py0000664000175000017500000003240212177456333023376 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Flow based cut algorithms """ # http://www.informatik.uni-augsburg.de/thi/personen/kammer/Graph_Connectivity.pdf # http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf import itertools from operator import itemgetter import networkx as nx from networkx.algorithms.connectivity.connectivity import \ _aux_digraph_node_connectivity, _aux_digraph_edge_connectivity, \ dominating_set, node_connectivity __author__ = '\n'.join(['Jordi Torrents ']) __all__ = [ 'minimum_st_node_cut', 'minimum_node_cut', 'minimum_st_edge_cut', 'minimum_edge_cut', ] def minimum_st_edge_cut(G, s, t, capacity='capacity'): """Returns the edges of the cut-set of a minimum (s, t)-cut. We use the max-flow min-cut theorem, i.e., the capacity of a minimum capacity cut is equal to the flow value of a maximum flow. Parameters ---------- G : NetworkX graph Edges of the graph are expected to have an attribute called 'capacity'. If this attribute is not present, the edge is considered to have infinite capacity. s : node Source node for the flow. t : node Sink node for the flow. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. Returns ------- cutset : set Set of edges that, if removed from the graph, will disconnect it Raises ------ NetworkXUnbounded If the graph has a path of infinite capacity, all cuts have infinite capacity and the function raises a NetworkXError. 
Examples -------- >>> G = nx.DiGraph() >>> G.add_edge('x','a', capacity = 3.0) >>> G.add_edge('x','b', capacity = 1.0) >>> G.add_edge('a','c', capacity = 3.0) >>> G.add_edge('b','c', capacity = 5.0) >>> G.add_edge('b','d', capacity = 4.0) >>> G.add_edge('d','e', capacity = 2.0) >>> G.add_edge('c','y', capacity = 2.0) >>> G.add_edge('e','y', capacity = 3.0) >>> sorted(nx.minimum_edge_cut(G, 'x', 'y')) [('c', 'y'), ('x', 'b')] >>> nx.min_cut(G, 'x', 'y') 3.0 """ try: flow, H = nx.ford_fulkerson_flow_and_auxiliary(G, s, t, capacity=capacity) cutset = set() # Compute reachable nodes from source in the residual network reachable = set(nx.single_source_shortest_path(H,s)) # And unreachable nodes others = set(H) - reachable # - set([s]) # Any edge in the original network linking these two partitions # is part of the edge cutset for u, nbrs in ((n, G[n]) for n in reachable): cutset.update((u,v) for v in nbrs if v in others) return cutset except nx.NetworkXUnbounded: # Should we raise any other exception or just let ford_fulkerson # propagate nx.NetworkXUnbounded ? raise nx.NetworkXUnbounded("Infinite capacity path, no minimum cut.") def minimum_st_node_cut(G, s, t, aux_digraph=None, mapping=None): r"""Returns a set of nodes of minimum cardinality that disconnect source from target in G. This function returns the set of nodes of minimum cardinality that, if removed, would destroy all paths among source and target in G. Parameters ---------- G : NetworkX graph s : node Source node. t : node Target node. Returns ------- cutset : set Set of nodes that, if removed, would destroy all paths between source and target in G. Examples -------- >>> # Platonic icosahedral graph has node connectivity 5 >>> G = nx.icosahedral_graph() >>> len(nx.minimum_node_cut(G, 0, 6)) 5 Notes ----- This is a flow based implementation of minimum node cut. The algorithm is based in solving a number of max-flow problems (ie local st-node connectivity, see local_node_connectivity) to determine the capacity of the minimum cut on an auxiliary directed network that corresponds to the minimum node cut of G. It handles both directed and undirected graphs. This implementation is based on algorithm 11 in [1]_. We use the Ford and Fulkerson algorithm to compute max flow (see ford_fulkerson). See also -------- node_connectivity edge_connectivity minimum_edge_cut max_flow ford_fulkerson References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ if aux_digraph is None or mapping is None: H, mapping = _aux_digraph_node_connectivity(G) else: H = aux_digraph edge_cut = minimum_st_edge_cut(H, '%sB' % mapping[s], '%sA' % mapping[t]) # Each node in the original graph maps to two nodes of the auxiliary graph node_cut = set(H.node[node]['id'] for edge in edge_cut for node in edge) return node_cut - set([s,t]) def minimum_node_cut(G, s=None, t=None): r"""Returns a set of nodes of minimum cardinality that disconnects G. If source and target nodes are provided, this function returns the set of nodes of minimum cardinality that, if removed, would destroy all paths among source and target in G. If not, it returns a set of nodes of minimum cardinality that disconnects G. Parameters ---------- G : NetworkX graph s : node Source node. Optional (default=None) t : node Target node. Optional (default=None) Returns ------- cutset : set Set of nodes that, if removed, would disconnect G. 
If source and target nodes are provided, the set contians the nodes that if removed, would destroy all paths between source and target. Examples -------- >>> # Platonic icosahedral graph has node connectivity 5 >>> G = nx.icosahedral_graph() >>> len(nx.minimum_node_cut(G)) 5 >>> # this is the minimum over any pair of non adjacent nodes >>> from itertools import combinations >>> for u,v in combinations(G, 2): ... if v not in G[u]: ... assert(len(nx.minimum_node_cut(G,u,v)) == 5) ... Notes ----- This is a flow based implementation of minimum node cut. The algorithm is based in solving a number of max-flow problems (ie local st-node connectivity, see local_node_connectivity) to determine the capacity of the minimum cut on an auxiliary directed network that corresponds to the minimum node cut of G. It handles both directed and undirected graphs. This implementation is based on algorithm 11 in [1]_. We use the Ford and Fulkerson algorithm to compute max flow (see ford_fulkerson). See also -------- node_connectivity edge_connectivity minimum_edge_cut max_flow ford_fulkerson References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ # Local minimum node cut if s is not None and t is not None: if s not in G: raise nx.NetworkXError('node %s not in graph' % s) if t not in G: raise nx.NetworkXError('node %s not in graph' % t) return minimum_st_node_cut(G, s, t) # Global minimum node cut # Analog to the algoritm 11 for global node connectivity in [1] if G.is_directed(): if not nx.is_weakly_connected(G): raise nx.NetworkXError('Input graph is not connected') iter_func = itertools.permutations def neighbors(v): return itertools.chain.from_iterable([G.predecessors_iter(v), G.successors_iter(v)]) else: if not nx.is_connected(G): raise nx.NetworkXError('Input graph is not connected') iter_func = itertools.combinations neighbors = G.neighbors_iter # Choose a node with minimum degree deg = G.degree() min_deg = min(deg.values()) v = next(n for n,d in deg.items() if d == min_deg) # Initial node cutset is all neighbors of the node with minimum degree min_cut = set(G[v]) # Reuse the auxiliary digraph H, mapping = _aux_digraph_node_connectivity(G) # compute st node cuts between v and all its non-neighbors nodes in G # and store the minimum for w in set(G) - set(neighbors(v)) - set([v]): this_cut = minimum_st_node_cut(G, v, w, aux_digraph=H, mapping=mapping) if len(min_cut) >= len(this_cut): min_cut = this_cut # Same for non adjacent pairs of neighbors of v for x,y in iter_func(neighbors(v),2): if y in G[x]: continue this_cut = minimum_st_node_cut(G, x, y, aux_digraph=H, mapping=mapping) if len(min_cut) >= len(this_cut): min_cut = this_cut return min_cut def minimum_edge_cut(G, s=None, t=None): r"""Returns a set of edges of minimum cardinality that disconnects G. If source and target nodes are provided, this function returns the set of edges of minimum cardinality that, if removed, would break all paths among source and target in G. If not, it returns a set of edges of minimum cardinality that disconnects G. Parameters ---------- G : NetworkX graph s : node Source node. Optional (default=None) t : node Target node. Optional (default=None) Returns ------- cutset : set Set of edges that, if removed, would disconnect G. If source and target nodes are provided, the set contians the edges that if removed, would destroy all paths between source and target. 
Examples -------- >>> # Platonic icosahedral graph has edge connectivity 5 >>> G = nx.icosahedral_graph() >>> len(nx.minimum_edge_cut(G)) 5 >>> # this is the minimum over any pair of nodes >>> from itertools import combinations >>> for u,v in combinations(G, 2): ... assert(len(nx.minimum_edge_cut(G,u,v)) == 5) ... Notes ----- This is a flow based implementation of minimum edge cut. For undirected graphs the algorithm works by finding a 'small' dominating set of nodes of G (see algorithm 7 in [1]_) and computing the maximum flow between an arbitrary node in the dominating set and the rest of nodes in it. This is an implementation of algorithm 6 in [1]_. For directed graphs, the algorithm does n calls to the max flow function. This is an implementation of algorithm 8 in [1]_. We use the Ford and Fulkerson algorithm to compute max flow (see ford_fulkerson). See also -------- node_connectivity edge_connectivity minimum_node_cut max_flow ford_fulkerson References ---------- .. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms. http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf """ # reuse auxiliary digraph H = _aux_digraph_edge_connectivity(G) # Local minimum edge cut if s and t are not None if s is not None and t is not None: if s not in G: raise nx.NetworkXError('node %s not in graph' % s) if t not in G: raise nx.NetworkXError('node %s not in graph' % t) return minimum_st_edge_cut(H, s, t) # Global minimum edge cut # Analog to the algoritm for global edge connectivity if G.is_directed(): # Based on algorithm 8 in [1] if not nx.is_weakly_connected(G): raise nx.NetworkXError('Input graph is not connected') # Initial cutset is all edges of a node with minimum degree deg = G.degree() min_deg = min(deg.values()) node = next(n for n,d in deg.items() if d==min_deg) min_cut = G.edges(node) nodes = G.nodes() n = len(nodes) for i in range(n): try: this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i+1]) if len(this_cut) <= len(min_cut): min_cut = this_cut except IndexError: # Last node! this_cut = minimum_st_edge_cut(H, nodes[i], nodes[0]) if len(this_cut) <= len(min_cut): min_cut = this_cut return min_cut else: # undirected # Based on algorithm 6 in [1] if not nx.is_connected(G): raise nx.NetworkXError('Input graph is not connected') # Initial cutset is all edges of a node with minimum degree deg = G.degree() min_deg = min(deg.values()) node = next(n for n,d in deg.items() if d==min_deg) min_cut = G.edges(node) # A dominating set is \lambda-covering # We need a dominating set with at least two nodes for node in G: D = dominating_set(G, start_with=node) v = D.pop() if D: break else: # in complete graphs the dominating set will always be of one node # thus we return min_cut, which now contains the edges of a node # with minimum degree return min_cut for w in D: this_cut = minimum_st_edge_cut(H, v, w) if len(this_cut) <= len(min_cut): min_cut = this_cut return min_cut networkx-1.8.1/networkx/algorithms/core.py0000664000175000017500000002170012177456333020631 0ustar aricaric00000000000000""" Find the k-cores of a graph. The k-core is found by recursively pruning nodes with degrees less than k. See the following reference for details: An O(m) Algorithm for Cores Decomposition of Networks Vladimir Batagelj and Matjaz Zaversnik, 2003. 
http://arxiv.org/abs/cs.DS/0310049 """ __author__ = "\n".join(['Dan Schult (dschult@colgate.edu)', 'Jason Grout (jason-sage@creativetrax.com)', 'Aric Hagberg (hagberg@lanl.gov)']) # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['core_number','k_core','k_shell','k_crust','k_corona','find_cores'] import networkx as nx def core_number(G): """Return the core number for each vertex. A k-core is a maximal subgraph that contains nodes of degree k or more. The core number of a node is the largest value k of a k-core containing that node. Parameters ---------- G : NetworkX graph A graph or directed graph Returns ------- core_number : dictionary A dictionary keyed by node to the core number. Raises ------ NetworkXError The k-core is not defined for graphs with self loops or parallel edges. Notes ----- Not implemented for graphs with parallel edges or self loops. For directed graphs the node degree is defined to be the in-degree + out-degree. References ---------- .. [1] An O(m) Algorithm for Cores Decomposition of Networks Vladimir Batagelj and Matjaz Zaversnik, 2003. http://arxiv.org/abs/cs.DS/0310049 """ if G.is_multigraph(): raise nx.NetworkXError( 'MultiGraph and MultiDiGraph types not supported.') if G.number_of_selfloops()>0: raise nx.NetworkXError( 'Input graph has self loops; the core number is not defined.', 'Consider using G.remove_edges_from(G.selfloop_edges()).') if G.is_directed(): import itertools def neighbors(v): return itertools.chain.from_iterable([G.predecessors_iter(v), G.successors_iter(v)]) else: neighbors=G.neighbors_iter degrees=G.degree() # sort nodes by degree nodes=sorted(degrees,key=degrees.get) bin_boundaries=[0] curr_degree=0 for i,v in enumerate(nodes): if degrees[v]>curr_degree: bin_boundaries.extend([i]*(degrees[v]-curr_degree)) curr_degree=degrees[v] node_pos = dict((v,pos) for pos,v in enumerate(nodes)) # initial guesses for core is degree core=degrees nbrs=dict((v,set(neighbors(v))) for v in G) for v in nodes: for u in nbrs[v]: if core[u] > core[v]: nbrs[u].remove(v) pos=node_pos[u] bin_start=bin_boundaries[core[u]] node_pos[u]=bin_start node_pos[nodes[bin_start]]=pos nodes[bin_start],nodes[pos]=nodes[pos],nodes[bin_start] bin_boundaries[core[u]]+=1 core[u]-=1 return core find_cores=core_number def k_core(G,k=None,core_number=None): """Return the k-core of G. A k-core is a maximal subgraph that contains nodes of degree k or more. Parameters ---------- G : NetworkX graph A graph or directed graph k : int, optional The order of the core. If not specified return the main core. core_number : dictionary, optional Precomputed core numbers for the graph G. Returns ------- G : NetworkX graph The k-core subgraph Raises ------ NetworkXError The k-core is not defined for graphs with self loops or parallel edges. Notes ----- The main core is the core with the largest degree. Not implemented for graphs with parallel edges or self loops. For directed graphs the node degree is defined to be the in-degree + out-degree. Graph, node, and edge attributes are copied to the subgraph. See Also -------- core_number References ---------- .. [1] An O(m) Algorithm for Cores Decomposition of Networks Vladimir Batagelj and Matjaz Zaversnik, 2003. 
http://arxiv.org/abs/cs.DS/0310049 """ if core_number is None: core_number=nx.core_number(G) if k is None: k=max(core_number.values()) # max core nodes=(n for n in core_number if core_number[n]>=k) return G.subgraph(nodes).copy() def k_shell(G,k=None,core_number=None): """Return the k-shell of G. The k-shell is the subgraph of nodes in the k-core containing nodes of exactly degree k. Parameters ---------- G : NetworkX graph A graph or directed graph. k : int, optional The order of the shell. If not specified return the main shell. core_number : dictionary, optional Precomputed core numbers for the graph G. Returns ------- G : NetworkX graph The k-shell subgraph Raises ------ NetworkXError The k-shell is not defined for graphs with self loops or parallel edges. Notes ----- This is similar to k_corona but in that case only neighbors in the k-core are considered. Not implemented for graphs with parallel edges or self loops. For directed graphs the node degree is defined to be the in-degree + out-degree. Graph, node, and edge attributes are copied to the subgraph. See Also -------- core_number k_corona ---------- .. [1] A model of Internet topology using k-shell decomposition Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt, and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 http://www.pnas.org/content/104/27/11150.full """ if core_number is None: core_number=nx.core_number(G) if k is None: k=max(core_number.values()) # max core nodes=(n for n in core_number if core_number[n]==k) return G.subgraph(nodes).copy() def k_crust(G,k=None,core_number=None): """Return the k-crust of G. The k-crust is the graph G with the k-core removed. Parameters ---------- G : NetworkX graph A graph or directed graph. k : int, optional The order of the shell. If not specified return the main crust. core_number : dictionary, optional Precomputed core numbers for the graph G. Returns ------- G : NetworkX graph The k-crust subgraph Raises ------ NetworkXError The k-crust is not defined for graphs with self loops or parallel edges. Notes ----- This definition of k-crust is different than the definition in [1]_. The k-crust in [1]_ is equivalent to the k+1 crust of this algorithm. Not implemented for graphs with parallel edges or self loops. For directed graphs the node degree is defined to be the in-degree + out-degree. Graph, node, and edge attributes are copied to the subgraph. See Also -------- core_number References ---------- .. [1] A model of Internet topology using k-shell decomposition Shai Carmi, Shlomo Havlin, Scott Kirkpatrick, Yuval Shavitt, and Eran Shir, PNAS July 3, 2007 vol. 104 no. 27 11150-11154 http://www.pnas.org/content/104/27/11150.full """ if core_number is None: core_number=nx.core_number(G) if k is None: k=max(core_number.values())-1 nodes=(n for n in core_number if core_number[n]<=k) return G.subgraph(nodes).copy() def k_corona(G, k, core_number=None): """Return the k-crust of G. The k-corona is the subset of vertices in the k-core which have exactly k neighbours in the k-core. Parameters ---------- G : NetworkX graph A graph or directed graph k : int The order of the corona. core_number : dictionary, optional Precomputed core numbers for the graph G. Returns ------- G : NetworkX graph The k-corona subgraph Raises ------ NetworkXError The k-cornoa is not defined for graphs with self loops or parallel edges. Notes ----- Not implemented for graphs with parallel edges or self loops. For directed graphs the node degree is defined to be the in-degree + out-degree. 
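    For example, in the complete graph on four nodes every node belongs to
    the 3-core and has exactly three neighbours there, so the 3-corona
    contains all four nodes::

        >>> G = nx.complete_graph(4)
        >>> sorted(nx.k_corona(G, 3).nodes())
        [0, 1, 2, 3]
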
Graph, node, and edge attributes are copied to the subgraph. See Also -------- core_number References ---------- .. [1] k -core (bootstrap) percolation on complex networks: Critical phenomena and nonlocal effects, A. V. Goltsev, S. N. Dorogovtsev, and J. F. F. Mendes, Phys. Rev. E 73, 056101 (2006) http://link.aps.org/doi/10.1103/PhysRevE.73.056101 """ if core_number is None: core_number = nx.core_number(G) nodes = (n for n in core_number if core_number[n] >= k and len([v for v in G[n] if core_number[v] >= k]) == k) return G.subgraph(nodes).copy() networkx-1.8.1/networkx/algorithms/smetric.py0000664000175000017500000000226612177456333021355 0ustar aricaric00000000000000import networkx as nx #from networkx.generators.smax import li_smax_graph def s_metric(G, normalized=True): """Return the s-metric of graph. The s-metric is defined as the sum of the products deg(u)*deg(v) for every edge (u,v) in G. If norm is provided construct the s-max graph and compute it's s_metric, and return the normalized s value Parameters ---------- G : graph The graph used to compute the s-metric. normalized : bool (optional) Normalize the value. Returns ------- s : float The s-metric of the graph. References ---------- .. [1] Lun Li, David Alderson, John C. Doyle, and Walter Willinger, Towards a Theory of Scale-Free Graphs: Definition, Properties, and Implications (Extended Version), 2005. http://arxiv.org/abs/cond-mat/0501169 """ if normalized: raise nx.NetworkXError("Normalization not implemented") # Gmax = li_smax_graph(list(G.degree().values())) # return s_metric(G,normalized=False)/s_metric(Gmax,normalized=False) # else: return float(sum([G.degree(u)*G.degree(v) for (u,v) in G.edges_iter()])) networkx-1.8.1/networkx/algorithms/block.py0000664000175000017500000000772712177456333021010 0ustar aricaric00000000000000# encoding: utf-8 """ Functions for creating network blockmodels from node partitions. Created by Drew Conway Copyright (c) 2010. All rights reserved. """ __author__ = """\n""".join(['Drew Conway ', 'Aric Hagberg ']) __all__=['blockmodel'] import networkx as nx def blockmodel(G,partitions,multigraph=False): """Returns a reduced graph constructed using the generalized block modeling technique. The blockmodel technique collapses nodes into blocks based on a given partitioning of the node set. Each partition of nodes (block) is represented as a single node in the reduced graph. Edges between nodes in the block graph are added according to the edges in the original graph. If the parameter multigraph is False (the default) a single edge is added with a weight equal to the sum of the edge weights between nodes in the original graph The default is a weight of 1 if weights are not specified. If the parameter multigraph is True then multiple edges are added each with the edge data from the original graph. Parameters ---------- G : graph A networkx Graph or DiGraph partitions : list of lists, or list of sets The partition of the nodes. Must be non-overlapping. multigraph : bool, optional If True return a MultiGraph with the edge data of the original graph applied to each corresponding edge in the new graph. If False return a Graph with the sum of the edge weights, or a count of the edges if the original graph is unweighted. Returns ------- blockmodel : a Networkx graph object Examples -------- >>> G=nx.path_graph(6) >>> partition=[[0,1],[2,3],[4,5]] >>> M=nx.blockmodel(G,partition) References ---------- .. 
[1] Patrick Doreian, Vladimir Batagelj, and Anuska Ferligoj "Generalized Blockmodeling",Cambridge University Press, 2004. """ # Create sets of node partitions part=list(map(set,partitions)) # Check for overlapping node partitions u=set() for p1,p2 in zip(part[:-1],part[1:]): u.update(p1) #if not u.isdisjoint(p2): # Python 2.6 required if len (u.intersection(p2))>0: raise nx.NetworkXException("Overlapping node partitions.") # Initialize blockmodel graph if multigraph: if G.is_directed(): M=nx.MultiDiGraph() else: M=nx.MultiGraph() else: if G.is_directed(): M=nx.DiGraph() else: M=nx.Graph() # Add nodes and properties to blockmodel # The blockmodel nodes are node-induced subgraphs of G # Label them with integers starting at 0 for i,p in zip(range(len(part)),part): M.add_node(i) # The node-induced subgraph is stored as the node 'graph' attribute SG=G.subgraph(p) M.node[i]['graph']=SG M.node[i]['nnodes']=SG.number_of_nodes() M.node[i]['nedges']=SG.number_of_edges() M.node[i]['density']=nx.density(SG) # Create mapping between original node labels and new blockmodel node labels block_mapping={} for n in M: nodes_in_block=M.node[n]['graph'].nodes() block_mapping.update(dict.fromkeys(nodes_in_block,n)) # Add edges to block graph for u,v,d in G.edges(data=True): bmu=block_mapping[u] bmv=block_mapping[v] if bmu==bmv: # no self loops continue if multigraph: # For multigraphs add an edge for each edge in original graph M.add_edge(bmu,bmv,attr_dict=d) else: # For graphs and digraphs add single weighted edge weight=d.get('weight',1.0) # default to 1 if no weight specified if M.has_edge(bmu,bmv): M[bmu][bmv]['weight']+=weight else: M.add_edge(bmu,bmv,weight=weight) return M networkx-1.8.1/networkx/algorithms/__init__.py0000664000175000017500000000412012177456333021435 0ustar aricaric00000000000000from networkx.algorithms.assortativity import * from networkx.algorithms.block import * from networkx.algorithms.boundary import * from networkx.algorithms.centrality import * from networkx.algorithms.cluster import * from networkx.algorithms.clique import * from networkx.algorithms.community import * from networkx.algorithms.components import * from networkx.algorithms.connectivity import * from networkx.algorithms.core import * from networkx.algorithms.cycles import * from networkx.algorithms.dag import * from networkx.algorithms.distance_measures import * from networkx.algorithms.flow import * from networkx.algorithms.hierarchy import * from networkx.algorithms.matching import * from networkx.algorithms.mis import * from networkx.algorithms.mst import * from networkx.algorithms.link_analysis import * from networkx.algorithms.operators import * from networkx.algorithms.shortest_paths import * from networkx.algorithms.smetric import * from networkx.algorithms.traversal import * from networkx.algorithms.isolate import * from networkx.algorithms.euler import * from networkx.algorithms.vitality import * from networkx.algorithms.chordal import * from networkx.algorithms.richclub import * from networkx.algorithms.distance_regular import * from networkx.algorithms.swap import * from networkx.algorithms.graphical import * from networkx.algorithms.simple_paths import * import networkx.algorithms.assortativity import networkx.algorithms.bipartite import networkx.algorithms.centrality import networkx.algorithms.cluster import networkx.algorithms.clique import networkx.algorithms.components import networkx.algorithms.connectivity import networkx.algorithms.flow import networkx.algorithms.isomorphism import 
networkx.algorithms.link_analysis import networkx.algorithms.shortest_paths import networkx.algorithms.traversal import networkx.algorithms.chordal import networkx.algorithms.operators from networkx.algorithms.bipartite import projected_graph,project,is_bipartite from networkx.algorithms.isomorphism import is_isomorphic,could_be_isomorphic,\ fast_could_be_isomorphic,faster_could_be_isomorphic networkx-1.8.1/networkx/algorithms/matching.py0000664000175000017500000010021612177456333021473 0ustar aricaric00000000000000""" ******** Matching ******** """ # Copyright (C) 2004-2008 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # Copyright (C) 2011 by # Nicholas Mancuso # All rights reserved. # BSD license. from itertools import repeat __author__ = """\n""".join(['Joris van Rantwijk', 'Nicholas Mancuso (nick.mancuso@gmail.com)']) _all__ = ['max_weight_matching', 'maximal_matching'] def maximal_matching(G): r"""Find a maximal cardinality matching in the graph. A matching is a subset of edges in which no node occurs more than once. The cardinality of a matching is the number of matched edges. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- matching : set A maximal matching of the graph. Notes ----- The algorithm greedily selects a maximal matching M of the graph G (i.e. no superset of M exists). It runs in `O(|E|)` time. """ matching = set([]) edges = set([]) for edge in G.edges_iter(): # If the edge isn't covered, add it to the matching # then remove neighborhood of u and v from consideration. if edge not in edges: u, v = edge matching.add(edge) edges |= set(G.edges(u)) edges |= set(G.edges(v)) return matching def max_weight_matching(G, maxcardinality=False): """Compute a maximum-weighted matching of G. A matching is a subset of edges in which no node occurs more than once. The cardinality of a matching is the number of matched edges. The weight of a matching is the sum of the weights of its edges. Parameters ---------- G : NetworkX graph Undirected graph maxcardinality: bool, optional If maxcardinality is True, compute the maximum-cardinality matching with maximum weight among all maximum-cardinality matchings. Returns ------- mate : dictionary The matching is returned as a dictionary, mate, such that mate[v] == w if node v is matched to node w. Unmatched nodes do not occur as a key in mate. Notes ------ If G has edges with 'weight' attribute the edge data are used as weight values else the weights are assumed to be 1. This function takes time O(number_of_nodes ** 3). If all edge weights are integers, the algorithm uses only integer computations. If floating point weights are used, the algorithm could return a slightly suboptimal matching due to numeric precision errors. This method is based on the "blossom" method for finding augmenting paths and the "primal-dual" method for finding a matching of maximum weight, both methods invented by Jack Edmonds [1]_. References ---------- .. [1] "Efficient Algorithms for Finding Maximum Matching in Graphs", Zvi Galil, ACM Computing Surveys, 1986. """ # # The algorithm is taken from "Efficient Algorithms for Finding Maximum # Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986. # It is based on the "blossom" method for finding augmenting paths and # the "primal-dual" method for finding a matching of maximum weight, both # methods invented by Jack Edmonds. # # A C program for maximum weight matching by Ed Rothberg was used # extensively to validate this new code. 
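    # A rough sketch of the two modes on a small weighted path (illustrative
    # only, not executed here):
    #
    #     G = nx.Graph()
    #     G.add_weighted_edges_from([(1, 2, 1), (2, 3, 5), (3, 4, 1)])
    #     max_weight_matching(G)                       # mate[2] == 3, mate[3] == 2
    #     max_weight_matching(G, maxcardinality=True)  # mate[1] == 2, mate[3] == 4
    #
    # The heavy middle edge wins on weight alone, but requiring maximum
    # cardinality forces the two outer edges instead.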
# # Many terms used in the code comments are explained in the paper # by Galil. You will probably need the paper to make sense of this code. # class NoNode: """Dummy value which is different from any node.""" pass class Blossom: """Representation of a non-trivial blossom or sub-blossom.""" __slots__ = [ 'childs', 'edges', 'mybestedges' ] # b.childs is an ordered list of b's sub-blossoms, starting with # the base and going round the blossom. # b.edges is the list of b's connecting edges, such that # b.edges[i] = (v, w) where v is a vertex in b.childs[i] # and w is a vertex in b.childs[wrap(i+1)]. # If b is a top-level S-blossom, # b.mybestedges is a list of least-slack edges to neighbouring # S-blossoms, or None if no such list has been computed yet. # This is used for efficient computation of delta3. # Generate the blossom's leaf vertices. def leaves(self): for t in self.childs: if isinstance(t, Blossom): for v in t.leaves(): yield v else: yield t # Get a list of vertices. gnodes = G.nodes() if not gnodes: return { } # don't bother with empty graphs # Find the maximum edge weight. maxweight = 0 allinteger = True for i,j,d in G.edges_iter(data=True): wt=d.get('weight',1) if i != j and wt > maxweight: maxweight = wt allinteger = allinteger and (str(type(wt)).split("'")[1] in ('int', 'long')) # If v is a matched vertex, mate[v] is its partner vertex. # If v is a single vertex, v does not occur as a key in mate. # Initially all vertices are single; updated during augmentation. mate = { } # If b is a top-level blossom, # label.get(b) is None if b is unlabeled (free), # 1 if b is an S-blossom, # 2 if b is a T-blossom. # The label of a vertex is found by looking at the label of its top-level # containing blossom. # If v is a vertex inside a T-blossom, label[v] is 2 iff v is reachable # from an S-vertex outside the blossom. # Labels are assigned during a stage and reset after each augmentation. label = { } # If b is a labeled top-level blossom, # labeledge[b] = (v, w) is the edge through which b obtained its label # such that w is a vertex in b, or None if b's base vertex is single. # If w is a vertex inside a T-blossom and label[w] == 2, # labeledge[w] = (v, w) is an edge through which w is reachable from # outside the blossom. labeledge = { } # If v is a vertex, inblossom[v] is the top-level blossom to which v # belongs. # If v is a top-level vertex, inblossom[v] == v since v is itself # a (trivial) top-level blossom. # Initially all vertices are top-level trivial blossoms. inblossom = dict(zip(gnodes, gnodes)) # If b is a sub-blossom, # blossomparent[b] is its immediate parent (sub-)blossom. # If b is a top-level blossom, blossomparent[b] is None. blossomparent = dict(zip(gnodes, repeat(None))) # If b is a (sub-)blossom, # blossombase[b] is its base VERTEX (i.e. recursive sub-blossom). blossombase = dict(zip(gnodes, gnodes)) # If w is a free vertex (or an unreached vertex inside a T-blossom), # bestedge[w] = (v, w) is the least-slack edge from an S-vertex, # or None if there is no such edge. # If b is a (possibly trivial) top-level S-blossom, # bestedge[b] = (v, w) is the least-slack edge to a different S-blossom # (v inside b), or None if there is no such edge. # This is used for efficient computation of delta2 and delta3. bestedge = { } # If v is a vertex, # dualvar[v] = 2 * u(v) where u(v) is the v's variable in the dual # optimization problem (if all edge weights are integers, multiplication # by two ensures that all values remain integers throughout the algorithm). 
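    # A worked illustration (made-up numbers): because dual variables are
    # stored doubled, a graph whose largest edge weight is 6 starts every
    # vertex at dualvar[v] == 6 (see the initialization just below), so an
    # edge of weight 6 has slack 6 + 6 - 2*6 == 0 and is immediately
    # allowable, while an edge of weight 2 has slack 6 + 6 - 2*2 == 8.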
# Initially, u(v) = maxweight / 2. dualvar = dict(zip(gnodes, repeat(maxweight))) # If b is a non-trivial blossom, # blossomdual[b] = z(b) where z(b) is b's variable in the dual # optimization problem. blossomdual = { } # If (v, w) in allowedge or (w, v) in allowedg, then the edge # (v, w) is known to have zero slack in the optimization problem; # otherwise the edge may or may not have zero slack. allowedge = { } # Queue of newly discovered S-vertices. queue = [ ] # Return 2 * slack of edge (v, w) (does not work inside blossoms). def slack(v, w): return dualvar[v] + dualvar[w] - 2 * G[v][w].get('weight',1) # Assign label t to the top-level blossom containing vertex w, # coming through an edge from vertex v. def assignLabel(w, t, v): b = inblossom[w] assert label.get(w) is None and label.get(b) is None label[w] = label[b] = t if v is not None: labeledge[w] = labeledge[b] = (v, w) else: labeledge[w] = labeledge[b] = None bestedge[w] = bestedge[b] = None if t == 1: # b became an S-vertex/blossom; add it(s vertices) to the queue. if isinstance(b, Blossom): queue.extend(b.leaves()) else: queue.append(b) elif t == 2: # b became a T-vertex/blossom; assign label S to its mate. # (If b is a non-trivial blossom, its base is the only vertex # with an external mate.) base = blossombase[b] assignLabel(mate[base], 1, base) # Trace back from vertices v and w to discover either a new blossom # or an augmenting path. Return the base vertex of the new blossom, # or NoNode if an augmenting path was found. def scanBlossom(v, w): # Trace back from v and w, placing breadcrumbs as we go. path = [ ] base = NoNode while v is not NoNode: # Look for a breadcrumb in v's blossom or put a new breadcrumb. b = inblossom[v] if label[b] & 4: base = blossombase[b] break assert label[b] == 1 path.append(b) label[b] = 5 # Trace one step back. if labeledge[b] is None: # The base of blossom b is single; stop tracing this path. assert blossombase[b] not in mate v = NoNode else: assert labeledge[b][0] == mate[blossombase[b]] v = labeledge[b][0] b = inblossom[v] assert label[b] == 2 # b is a T-blossom; trace one more step back. v = labeledge[b][0] # Swap v and w so that we alternate between both paths. if w is not NoNode: v, w = w, v # Remove breadcrumbs. for b in path: label[b] = 1 # Return base vertex, if we found one. return base # Construct a new blossom with given base, through S-vertices v and w. # Label the new blossom as S; set its dual variable to zero; # relabel its T-vertices to S and add them to the queue. def addBlossom(base, v, w): bb = inblossom[base] bv = inblossom[v] bw = inblossom[w] # Create blossom. b = Blossom() blossombase[b] = base blossomparent[b] = None blossomparent[bb] = b # Make list of sub-blossoms and their interconnecting edge endpoints. b.childs = path = [ ] b.edges = edgs = [ (v, w) ] # Trace back from v to base. while bv != bb: # Add bv to the new blossom. blossomparent[bv] = b path.append(bv) edgs.append(labeledge[bv]) assert label[bv] == 2 or (label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]]) # Trace one step back. v = labeledge[bv][0] bv = inblossom[v] # Add base sub-blossom; reverse lists. path.append(bb) path.reverse() edgs.reverse() # Trace back from w to base. while bw != bb: # Add bw to the new blossom. blossomparent[bw] = b path.append(bw) edgs.append((labeledge[bw][1], labeledge[bw][0])) assert label[bw] == 2 or (label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]]) # Trace one step back. w = labeledge[bw][0] bw = inblossom[w] # Set label to S. 
assert label[bb] == 1 label[b] = 1 labeledge[b] = labeledge[bb] # Set dual variable to zero. blossomdual[b] = 0 # Relabel vertices. for v in b.leaves(): if label[inblossom[v]] == 2: # This T-vertex now turns into an S-vertex because it becomes # part of an S-blossom; add it to the queue. queue.append(v) inblossom[v] = b # Compute b.mybestedges. bestedgeto = { } for bv in path: if isinstance(bv, Blossom): if bv.mybestedges is not None: # Walk this subblossom's least-slack edges. nblist = bv.mybestedges # The sub-blossom won't need this data again. bv.mybestedges = None else: # This subblossom does not have a list of least-slack # edges; get the information from the vertices. nblist = [ (v, w) for v in bv.leaves() for w in G.neighbors_iter(v) if v != w ] else: nblist = [ (bv, w) for w in G.neighbors_iter(bv) if bv != w ] for k in nblist: (i, j) = k if inblossom[j] == b: i, j = j, i bj = inblossom[j] if (bj != b and label.get(bj) == 1 and ((bj not in bestedgeto) or slack(i, j) < slack(*bestedgeto[bj]))): bestedgeto[bj] = k # Forget about least-slack edge of the subblossom. bestedge[bv] = None b.mybestedges = list(bestedgeto.values()) # Select bestedge[b]. mybestedge = None bestedge[b] = None for k in b.mybestedges: kslack = slack(*k) if mybestedge is None or kslack < mybestslack: mybestedge = k mybestslack = kslack bestedge[b] = mybestedge # Expand the given top-level blossom. def expandBlossom(b, endstage): # Convert sub-blossoms into top-level blossoms. for s in b.childs: blossomparent[s] = None if isinstance(s, Blossom): if endstage and blossomdual[s] == 0: # Recursively expand this sub-blossom. expandBlossom(s, endstage) else: for v in s.leaves(): inblossom[v] = s else: inblossom[s] = s # If we expand a T-blossom during a stage, its sub-blossoms must be # relabeled. if (not endstage) and label.get(b) == 2: # Start at the sub-blossom through which the expanding # blossom obtained its label, and relabel sub-blossoms untili # we reach the base. # Figure out through which sub-blossom the expanding blossom # obtained its label initially. entrychild = inblossom[labeledge[b][1]] # Decide in which direction we will go round the blossom. j = b.childs.index(entrychild) if j & 1: # Start index is odd; go forward and wrap. j -= len(b.childs) jstep = 1 else: # Start index is even; go backward. jstep = -1 # Move along the blossom until we get to the base. v, w = labeledge[b] while j != 0: # Relabel the T-sub-blossom. if jstep == 1: p, q = b.edges[j] else: q, p = b.edges[j-1] label[w] = None label[q] = None assignLabel(w, 2, v) # Step to the next S-sub-blossom and note its forward edge. allowedge[(p, q)] = allowedge[(q, p)] = True j += jstep if jstep == 1: v, w = b.edges[j] else: w, v = b.edges[j-1] # Step to the next T-sub-blossom. allowedge[(v, w)] = allowedge[(w, v)] = True j += jstep # Relabel the base T-sub-blossom WITHOUT stepping through to # its mate (so don't call assignLabel). bw = b.childs[j] label[w] = label[bw] = 2 labeledge[w] = labeledge[bw] = (v, w) bestedge[bw] = None # Continue along the blossom until we get back to entrychild. j += jstep while b.childs[j] != entrychild: # Examine the vertices of the sub-blossom to see whether # it is reachable from a neighbouring S-vertex outside the # expanding blossom. bv = b.childs[j] if label.get(bv) == 1: # This sub-blossom just got label S through one of its # neighbours; leave it be. 
j += jstep continue if isinstance(bv, Blossom): for v in bv.leaves(): if label.get(v): break else: v = bv # If the sub-blossom contains a reachable vertex, assign # label T to the sub-blossom. if label.get(v): assert label[v] == 2 assert inblossom[v] == bv label[v] = None label[mate[blossombase[bv]]] = None assignLabel(v, 2, labeledge[v][0]) j += jstep # Remove the expanded blossom entirely. label.pop(b, None) labeledge.pop(b, None) bestedge.pop(b, None) del blossomparent[b] del blossombase[b] del blossomdual[b] # Swap matched/unmatched edges over an alternating path through blossom b # between vertex v and the base vertex. Keep blossom bookkeeping consistent. def augmentBlossom(b, v): # Bubble up through the blossom tree from vertex v to an immediate # sub-blossom of b. t = v while blossomparent[t] != b: t = blossomparent[t] # Recursively deal with the first sub-blossom. if isinstance(t, Blossom): augmentBlossom(t, v) # Decide in which direction we will go round the blossom. i = j = b.childs.index(t) if i & 1: # Start index is odd; go forward and wrap. j -= len(b.childs) jstep = 1 else: # Start index is even; go backward. jstep = -1 # Move along the blossom until we get to the base. while j != 0: # Step to the next sub-blossom and augment it recursively. j += jstep t = b.childs[j] if jstep == 1: w, x = b.edges[j] else: x, w = b.edges[j-1] if isinstance(t, Blossom): augmentBlossom(t, w) # Step to the next sub-blossom and augment it recursively. j += jstep t = b.childs[j] if isinstance(t, Blossom): augmentBlossom(t, x) # Match the edge connecting those sub-blossoms. mate[w] = x mate[x] = w # Rotate the list of sub-blossoms to put the new base at the front. b.childs = b.childs[i:] + b.childs[:i] b.edges = b.edges[i:] + b.edges[:i] blossombase[b] = blossombase[b.childs[0]] assert blossombase[b] == v # Swap matched/unmatched edges over an alternating path between two # single vertices. The augmenting path runs through S-vertices v and w. def augmentMatching(v, w): for (s, j) in ((v, w), (w, v)): # Match vertex s to vertex j. Then trace back from s # until we find a single vertex, swapping matched and unmatched # edges as we go. while 1: bs = inblossom[s] assert label[bs] == 1 assert (labeledge[bs] is None and blossombase[bs] not in mate) or (labeledge[bs][0] == mate[blossombase[bs]]) # Augment through the S-blossom from s to base. if isinstance(bs, Blossom): augmentBlossom(bs, s) # Update mate[s] mate[s] = j # Trace one step back. if labeledge[bs] is None: # Reached single vertex; stop. break t = labeledge[bs][0] bt = inblossom[t] assert label[bt] == 2 # Trace one more step back. s, j = labeledge[bt] # Augment through the T-blossom from j to base. assert blossombase[bt] == t if isinstance(bt, Blossom): augmentBlossom(bt, j) # Update mate[j] mate[j] = s # Verify that the optimum solution has been reached. def verifyOptimum(): if maxcardinality: # Vertices may have negative dual; # find a constant non-negative number to add to all vertex duals. vdualoffset = max(0, -min(dualvar.values())) else: vdualoffset = 0 # 0. all dual variables are non-negative assert min(dualvar.values()) + vdualoffset >= 0 assert len(blossomdual) == 0 or min(blossomdual.values()) >= 0 # 0. all edges have non-negative slack and # 1. 
all matched edges have zero slack; for i,j,d in G.edges_iter(data=True): wt=d.get('weight',1) if i == j: continue # ignore self-loops s = dualvar[i] + dualvar[j] - 2 * wt iblossoms = [ i ] jblossoms = [ j ] while blossomparent[iblossoms[-1]] is not None: iblossoms.append(blossomparent[iblossoms[-1]]) while blossomparent[jblossoms[-1]] is not None: jblossoms.append(blossomparent[jblossoms[-1]]) iblossoms.reverse() jblossoms.reverse() for (bi, bj) in zip(iblossoms, jblossoms): if bi != bj: break s += 2 * blossomdual[bi] assert s >= 0 if mate.get(i) == j or mate.get(j) == i: assert mate[i] == j and mate[j] == i assert s == 0 # 2. all single vertices have zero dual value; for v in gnodes: assert (v in mate) or dualvar[v] + vdualoffset == 0 # 3. all blossoms with positive dual value are full. for b in blossomdual: if blossomdual[b] > 0: assert len(b.edges) % 2 == 1 for (i, j) in b.edges[1::2]: assert mate[i] == j and mate[j] == i # Ok. # Main loop: continue until no further improvement is possible. while 1: # Each iteration of this loop is a "stage". # A stage finds an augmenting path and uses that to improve # the matching. # Remove labels from top-level blossoms/vertices. label.clear() labeledge.clear() # Forget all about least-slack edges. bestedge.clear() for b in blossomdual: b.mybestedges = None # Loss of labeling means that we can not be sure that currently # allowable edges remain allowable througout this stage. allowedge.clear() # Make queue empty. queue[:] = [ ] # Label single blossoms/vertices with S and put them in the queue. for v in gnodes: if (v not in mate) and label.get(inblossom[v]) is None: assignLabel(v, 1, None) # Loop until we succeed in augmenting the matching. augmented = 0 while 1: # Each iteration of this loop is a "substage". # A substage tries to find an augmenting path; # if found, the path is used to improve the matching and # the stage ends. If there is no augmenting path, the # primal-dual method is used to pump some slack out of # the dual variables. # Continue labeling until all vertices which are reachable # through an alternating path have got a label. while queue and not augmented: # Take an S vertex from the queue. v = queue.pop() assert label[inblossom[v]] == 1 # Scan its neighbours: for w in G.neighbors_iter(v): if w == v: continue # ignore self-loops # w is a neighbour to v bv = inblossom[v] bw = inblossom[w] if bv == bw: # this edge is internal to a blossom; ignore it continue if (v, w) not in allowedge: kslack = slack(v, w) if kslack <= 0: # edge k has zero slack => it is allowable allowedge[(v, w)] = allowedge[(w, v)] = True if (v, w) in allowedge: if label.get(bw) is None: # (C1) w is a free vertex; # label w with T and label its mate with S (R12). assignLabel(w, 2, v) elif label.get(bw) == 1: # (C2) w is an S-vertex (not in the same blossom); # follow back-links to discover either an # augmenting path or a new blossom. base = scanBlossom(v, w) if base is not NoNode: # Found a new blossom; add it to the blossom # bookkeeping and turn it into an S-blossom. addBlossom(base, v, w) else: # Found an augmenting path; augment the # matching and end this stage. augmentMatching(v, w) augmented = 1 break elif label.get(w) is None: # w is inside a T-blossom, but w itself has not # yet been reached from outside the blossom; # mark it as reached (we need this to relabel # during T-blossom expansion). assert label[bw] == 2 label[w] = 2 labeledge[w] = (v, w) elif label.get(bw) == 1: # keep track of the least-slack non-allowable edge to # a different S-blossom. 
if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]): bestedge[bv] = (v, w) elif label.get(w) is None: # w is a free vertex (or an unreached vertex inside # a T-blossom) but we can not reach it yet; # keep track of the least-slack edge that reaches w. if bestedge.get(w) is None or kslack < slack(*bestedge[w]): bestedge[w] = (v, w) if augmented: break # There is no augmenting path under these constraints; # compute delta and reduce slack in the optimization problem. # (Note that our vertex dual variables, edge slacks and delta's # are pre-multiplied by two.) deltatype = -1 delta = deltaedge = deltablossom = None # Compute delta1: the minumum value of any vertex dual. if not maxcardinality: deltatype = 1 delta = min(dualvar.values()) # Compute delta2: the minimum slack on any edge between # an S-vertex and a free vertex. for v in G.nodes_iter(): if label.get(inblossom[v]) is None and bestedge.get(v) is not None: d = slack(*bestedge[v]) if deltatype == -1 or d < delta: delta = d deltatype = 2 deltaedge = bestedge[v] # Compute delta3: half the minimum slack on any edge between # a pair of S-blossoms. for b in blossomparent: if ( blossomparent[b] is None and label.get(b) == 1 and bestedge.get(b) is not None ): kslack = slack(*bestedge[b]) if allinteger: assert (kslack % 2) == 0 d = kslack // 2 else: d = kslack / 2.0 if deltatype == -1 or d < delta: delta = d deltatype = 3 deltaedge = bestedge[b] # Compute delta4: minimum z variable of any T-blossom. for b in blossomdual: if ( blossomparent[b] is None and label.get(b) == 2 and (deltatype == -1 or blossomdual[b] < delta) ): delta = blossomdual[b] deltatype = 4 deltablossom = b if deltatype == -1: # No further improvement possible; max-cardinality optimum # reached. Do a final delta update to make the optimum # verifyable. assert maxcardinality deltatype = 1 delta = max(0, min(dualvar.values())) # Update dual variables according to delta. for v in gnodes: if label.get(inblossom[v]) == 1: # S-vertex: 2*u = 2*u - 2*delta dualvar[v] -= delta elif label.get(inblossom[v]) == 2: # T-vertex: 2*u = 2*u + 2*delta dualvar[v] += delta for b in blossomdual: if blossomparent[b] is None: if label.get(b) == 1: # top-level S-blossom: z = z + 2*delta blossomdual[b] += delta elif label.get(b) == 2: # top-level T-blossom: z = z - 2*delta blossomdual[b] -= delta # Take action at the point where minimum delta occurred. if deltatype == 1: # No further improvement possible; optimum reached. break elif deltatype == 2: # Use the least-slack edge to continue the search. (v, w) = deltaedge assert label[inblossom[v]] == 1 allowedge[(v, w)] = allowedge[(w, v)] = True queue.append(v) elif deltatype == 3: # Use the least-slack edge to continue the search. (v, w) = deltaedge allowedge[(v, w)] = allowedge[(w, v)] = True assert label[inblossom[v]] == 1 queue.append(v) elif deltatype == 4: # Expand the least-z blossom. expandBlossom(deltablossom, False) # End of a this substage. # Paranoia check that the matching is symmetric. for v in mate: assert mate[mate[v]] == v # Stop when no more augmenting path can be found. if not augmented: break # End of a stage; expand all S-blossoms which have zero dual. for b in list(blossomdual.keys()): if b not in blossomdual: continue # already expanded if ( blossomparent[b] is None and label.get(b) == 1 and blossomdual[b] == 0 ): expandBlossom(b, True) # Verify that we reached the optimum solution (only for integer weights). 
if allinteger: verifyOptimum() return mate networkx-1.8.1/networkx/algorithms/bipartite/0000775000175000017500000000000012177457361021314 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/bipartite/centrality.py0000664000175000017500000001771312177456333024053 0ustar aricaric00000000000000#-*- coding: utf-8 -*- # Copyright (C) 2011 by # Jordi Torrents # Aric Hagberg # All rights reserved. # BSD license. import networkx as nx __author__ = """\n""".join(['Jordi Torrents ', 'Aric Hagberg (hagberg@lanl.gov)']) __all__=['degree_centrality', 'betweenness_centrality', 'closeness_centrality'] def degree_centrality(G, nodes): r"""Compute the degree centrality for nodes in a bipartite network. The degree centrality for a node `v` is the fraction of nodes connected to it. Parameters ---------- G : graph A bipartite network nodes : list or container Container with all nodes in one bipartite node set. Returns ------- centrality : dictionary Dictionary keyed by node with bipartite degree centrality as the value. See Also -------- betweenness_centrality, closeness_centrality, sets, is_bipartite Notes ----- The nodes input parameter must conatin all nodes in one bipartite node set, but the dictionary returned contains all nodes from both bipartite node sets. For unipartite networks, the degree centrality values are normalized by dividing by the maximum possible degree (which is `n-1` where `n` is the number of nodes in G). In the bipartite case, the maximum possible degree of a node in a bipartite node set is the number of nodes in the opposite node set [1]_. The degree centrality for a node `v` in the bipartite sets `U` with `n` nodes and `V` with `m` nodes is .. math:: d_{v} = \frac{deg(v)}{m}, \mbox{for} v \in U , d_{v} = \frac{deg(v)}{n}, \mbox{for} v \in V , where `deg(v)` is the degree of node `v`. References ---------- .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook of Social Network Analysis. Sage Publications. http://www.steveborgatti.com/papers/bhaffiliations.pdf """ top = set(nodes) bottom = set(G) - top s = 1.0/len(bottom) centrality = dict((n,d*s) for n,d in G.degree_iter(top)) s = 1.0/len(top) centrality.update(dict((n,d*s) for n,d in G.degree_iter(bottom))) return centrality def betweenness_centrality(G, nodes): r"""Compute betweenness centrality for nodes in a bipartite network. Betweenness centrality of a node `v` is the sum of the fraction of all-pairs shortest paths that pass through `v`. Values of betweenness are normalized by the maximum possible value which for bipartite graphs is limited by the relative size of the two node sets [1]_. Let `n` be the number of nodes in the node set `U` and `m` be the number of nodes in the node set `V`, then nodes in `U` are normalized by dividing by .. math:: \frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] , where .. math:: s = (n - 1) \div m , t = (n - 1) \mod m , and nodes in `V` are normalized by dividing by .. math:: \frac{1}{2} [n^2 (p + 1)^2 + n (p + 1)(2r - p - 1) - r (2p - r + 3)] , where, .. math:: p = (m - 1) \div n , r = (m - 1) \mod n . Parameters ---------- G : graph A bipartite graph nodes : list or container Container with all nodes in one bipartite node set. Returns ------- betweenness : dictionary Dictionary keyed by node with bipartite betweenness centrality as the value. 
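    Examples
    --------
    A small illustration (a path graph, taking nodes 1 and 3 as one
    bipartite node set; shown here only as a sketch of the normalization
    described above):

    >>> import networkx as nx
    >>> from networkx.algorithms import bipartite
    >>> G = nx.path_graph(4)
    >>> c = bipartite.betweenness_centrality(G, [1, 3])
    >>> c[1]
    1.0
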
See Also -------- degree_centrality, closeness_centrality, sets, is_bipartite Notes ----- The nodes input parameter must contain all nodes in one bipartite node set, but the dictionary returned contains all nodes from both node sets. References ---------- .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook of Social Network Analysis. Sage Publications. http://www.steveborgatti.com/papers/bhaffiliations.pdf """ top = set(nodes) bottom = set(G) - top n = float(len(top)) m = float(len(bottom)) s = (n-1) // m t = (n-1) % m bet_max_top = (((m**2)*((s+1)**2))+ (m*(s+1)*(2*t-s-1))- (t*((2*s)-t+3)))/2.0 p = (m-1) // n r = (m-1) % n bet_max_bot = (((n**2)*((p+1)**2))+ (n*(p+1)*(2*r-p-1))- (r*((2*p)-r+3)))/2.0 betweenness = nx.betweenness_centrality(G, normalized=False, weight=None) for node in top: betweenness[node]/=bet_max_top for node in bottom: betweenness[node]/=bet_max_bot return betweenness def closeness_centrality(G, nodes, normalized=True): r"""Compute the closeness centrality for nodes in a bipartite network. The closeness of a node is the distance to all other nodes in the graph or in the case that the graph is not connected to all other nodes in the connected component containing that node. Parameters ---------- G : graph A bipartite network nodes : list or container Container with all nodes in one bipartite node set. normalized : bool, optional If True (default) normalize by connected component size. Returns ------- closeness : dictionary Dictionary keyed by node with bipartite closeness centrality as the value. See Also -------- betweenness_centrality, degree_centrality sets, is_bipartite Notes ----- The nodes input parameter must conatin all nodes in one bipartite node set, but the dictionary returned contains all nodes from both node sets. Closeness centrality is normalized by the minimum distance possible. In the bipartite case the minimum distance for a node in one bipartite node set is 1 from all nodes in the other node set and 2 from all other nodes in its own set [1]_. Thus the closeness centrality for node `v` in the two bipartite sets `U` with `n` nodes and `V` with `m` nodes is .. math:: c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{for} v \in U, c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{for} v \in V, where `d` is the sum of the distances from `v` to all other nodes. Higher values of closeness indicate higher centrality. As in the unipartite case, setting normalized=True causes the values to normalized further to n-1 / size(G)-1 where n is the number of nodes in the connected part of graph containing the node. If the graph is not completely connected, this algorithm computes the closeness centrality for each connected part separately. References ---------- .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook of Social Network Analysis. Sage Publications. 
http://www.steveborgatti.com/papers/bhaffiliations.pdf """ closeness={} path_length=nx.single_source_shortest_path_length top = set(nodes) bottom = set(G) - top n = float(len(top)) m = float(len(bottom)) for node in top: sp=path_length(G,node) totsp=sum(sp.values()) if totsp > 0.0 and len(G) > 1: closeness[node]= (m + 2*(n-1)) / totsp if normalized: s=(len(sp)-1.0) / ( len(G) - 1 ) closeness[node] *= s else: closeness[n]=0.0 for node in bottom: sp=path_length(G,node) totsp=sum(sp.values()) if totsp > 0.0 and len(G) > 1: closeness[node]= (n + 2*(m-1)) / totsp if normalized: s=(len(sp)-1.0) / ( len(G) - 1 ) closeness[node] *= s else: closeness[n]=0.0 return closeness networkx-1.8.1/networkx/algorithms/bipartite/spectral.py0000664000175000017500000000475212177456333023511 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Spectral bipartivity measure. """ import networkx as nx __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['spectral_bipartivity'] def spectral_bipartivity(G, nodes=None, weight='weight'): """Returns the spectral bipartivity. Parameters ---------- G : NetworkX graph nodes : list or container optional(default is all nodes) Nodes to return value of spectral bipartivity contribution. weight : string or None optional (default = 'weight') Edge data key to use for edge weights. If None, weights set to 1. Returns ------- sb : float or dict A single number if the keyword nodes is not specified, or a dictionary keyed by node with the spectral bipartivity contribution of that node as the value. Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) >>> bipartite.spectral_bipartivity(G) 1.0 Notes ----- This implementation uses Numpy (dense) matrices which are not efficient for storing large sparse graphs. See Also -------- color References ---------- .. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of bipartivity in complex networks", PhysRev E 72, 046105 (2005) """ try: import scipy.linalg except ImportError: raise ImportError('spectral_bipartivity() requires SciPy: ', 'http://scipy.org/') nodelist = G.nodes() # ordering of nodes in matrix A = nx.to_numpy_matrix(G, nodelist, weight=weight) expA = scipy.linalg.expm(A) expmA = scipy.linalg.expm(-A) coshA = 0.5 * (expA + expmA) if nodes is None: # return single number for entire graph return coshA.diagonal().sum() / expA.diagonal().sum() else: # contribution for individual nodes index = dict(zip(nodelist, range(len(nodelist)))) sb = {} for n in nodes: i = index[n] sb[n] = coshA[i, i] / expA[i, i] return sb def setup_module(module): """Fixture for nose tests.""" from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/algorithms/bipartite/__init__.py0000664000175000017500000000613712177456333023432 0ustar aricaric00000000000000r""" This module provides functions and operations for bipartite graphs. Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in `E` that only connect nodes from opposite sets. It is common in the literature to use an spatial analogy referring to the two node sets as top and bottom nodes. 
The bipartite algorithms are not imported into the networkx namespace at the top level so the easiest way to use them is with: >>> import networkx as nx >>> from networkx.algorithms import bipartite NetworkX does not have a custom bipartite graph class but the Graph() or DiGraph() classes can be used to represent bipartite graphs. However, you have to keep track of which set each node belongs to, and make sure that there is no edge between nodes of the same set. The convention used in NetworkX is to use a node attribute named "bipartite" with values 0 or 1 to identify the sets each node belongs to. For example: >>> B = nx.Graph() >>> B.add_nodes_from([1,2,3,4], bipartite=0) # Add the node attribute "bipartite" >>> B.add_nodes_from(['a','b','c'], bipartite=1) >>> B.add_edges_from([(1,'a'), (1,'b'), (2,'b'), (2,'c'), (3,'c'), (4,'a')]) Many algorithms of the bipartite module of NetworkX require, as an argument, a container with all the nodes that belong to one set, in addition to the bipartite graph `B`. If `B` is connected, you can find the node sets using a two-coloring algorithm: >>> nx.is_connected(B) True >>> bottom_nodes, top_nodes = bipartite.sets(B) list(top_nodes) [1, 2, 3, 4] list(bottom_nodes) ['a', 'c', 'b'] However, if the input graph is not connected, there are more than one possible colorations. Thus, the following result is correct: >>> B.remove_edge(2,'c') >>> nx.is_connected(B) False >>> bottom_nodes, top_nodes = bipartite.sets(B) list(top_nodes) [1, 2, 4, 'c'] list(bottom_nodes) ['a', 3, 'b'] Using the "bipartite" node attribute, you can easily get the two node sets: >>> top_nodes = set(n for n,d in B.nodes(data=True) if d['bipartite']==0) >>> bottom_nodes = set(B) - top_nodes list(top_nodes) [1, 2, 3, 4] list(bottom_nodes) ['a', 'c', 'b'] So you can easily use the bipartite algorithms that require, as an argument, a container with all nodes that belong to one node set: >>> print(round(bipartite.density(B, bottom_nodes),2)) 0.42 >>> G = bipartite.projected_graph(B, top_nodes) >>> G.edges() [(1, 2), (1, 4)] All bipartite graph generators in NetworkX build bipartite graphs with the "bipartite" node attribute. Thus, you can use the same approach: >>> RB = nx.bipartite_random_graph(5, 7, 0.2) >>> RB_top = set(n for n,d in RB.nodes(data=True) if d['bipartite']==0) >>> RB_bottom = set(RB) - RB_top >>> list(RB_top) [0, 1, 2, 3, 4] >>> list(RB_bottom) [5, 6, 7, 8, 9, 10, 11] For other bipartite graph generators see the bipartite section of :doc:`generators`. 
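Finally, if you already have a candidate node set, you can check that it is
indeed one side of a bipartition with ``bipartite.is_bipartite_node_set``
(a small illustration reusing ``B`` and ``top_nodes`` from above):

>>> bipartite.is_bipartite_node_set(B, top_nodes)
True
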
""" from networkx.algorithms.bipartite.basic import * from networkx.algorithms.bipartite.centrality import * from networkx.algorithms.bipartite.cluster import * from networkx.algorithms.bipartite.projection import * from networkx.algorithms.bipartite.redundancy import * from networkx.algorithms.bipartite.spectral import * networkx-1.8.1/networkx/algorithms/bipartite/tests/0000775000175000017500000000000012177457361022456 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/bipartite/tests/test_centrality.py0000664000175000017500000001375712177456333026260 0ustar aricaric00000000000000from nose.tools import * import networkx as nx from networkx.algorithms import bipartite class TestBipartiteCentrality(object): def setUp(self): self.P4 = nx.path_graph(4) self.K3 = nx.complete_bipartite_graph(3,3) self.C4 = nx.cycle_graph(4) self.davis = nx.davis_southern_women_graph() self.top_nodes = [n for n,d in self.davis.nodes(data=True) if d['bipartite']==0] def test_degree_centrality(self): d = bipartite.degree_centrality(self.P4, [1,3]) answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5} assert_equal(d, answer) d = bipartite.degree_centrality(self.K3, [0,1,2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0} assert_equal(d, answer) d = bipartite.degree_centrality(self.C4, [0,2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0} assert_equal(d,answer) def test_betweenness_centrality(self): c = bipartite.betweenness_centrality(self.P4, [1,3]) answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0} assert_equal(c, answer) c = bipartite.betweenness_centrality(self.K3, [0,1,2]) answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125} assert_equal(c, answer) c = bipartite.betweenness_centrality(self.C4, [0,2]) answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25} assert_equal(c, answer) def test_closeness_centrality(self): c = bipartite.closeness_centrality(self.P4, [1,3]) answer = {0: 2.0/3, 1: 1.0, 2: 1.0, 3:2.0/3} assert_equal(c, answer) c = bipartite.closeness_centrality(self.K3, [0,1,2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0} assert_equal(c, answer) c = bipartite.closeness_centrality(self.C4, [0,2]) answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0} assert_equal(c, answer) G = nx.Graph() G.add_node(0) G.add_node(1) c = bipartite.closeness_centrality(G, [0]) assert_equal(c, {1: 0.0}) c = bipartite.closeness_centrality(G, [1]) assert_equal(c, {1: 0.0}) def test_davis_degree_centrality(self): G = self.davis deg = bipartite.degree_centrality(G, self.top_nodes) answer = {'E8':0.78, 'E9':0.67, 'E7':0.56, 'Nora Fayette':0.57, 'Evelyn Jefferson':0.57, 'Theresa Anderson':0.57, 'E6':0.44, 'Sylvia Avondale':0.50, 'Laura Mandeville':0.50, 'Brenda Rogers':0.50, 'Katherina Rogers':0.43, 'E5':0.44, 'Helen Lloyd':0.36, 'E3':0.33, 'Ruth DeSand':0.29, 'Verne Sanderson':0.29, 'E12':0.33, 'Myra Liddel':0.29, 'E11':0.22, 'Eleanor Nye':0.29, 'Frances Anderson':0.29, 'Pearl Oglethorpe':0.21, 'E4':0.22, 'Charlotte McDowd':0.29, 'E10':0.28, 'Olivia Carleton':0.14, 'Flora Price':0.14, 'E2':0.17, 'E1':0.17, 'Dorothy Murchison':0.14, 'E13':0.17, 'E14':0.17} for node, value in answer.items(): assert_almost_equal(value, deg[node], places=2) def test_davis_betweenness_centrality(self): G = self.davis bet = bipartite.betweenness_centrality(G, self.top_nodes) answer = {'E8':0.24, 'E9':0.23, 'E7':0.13, 'Nora Fayette':0.11, 'Evelyn Jefferson':0.10, 'Theresa Anderson':0.09, 'E6':0.07, 'Sylvia Avondale':0.07, 'Laura Mandeville':0.05, 'Brenda Rogers':0.05, 'Katherina Rogers':0.05, 'E5':0.04, 'Helen Lloyd':0.04, 'E3':0.02, 'Ruth 
DeSand':0.02, 'Verne Sanderson':0.02, 'E12':0.02, 'Myra Liddel':0.02, 'E11':0.02, 'Eleanor Nye':0.01, 'Frances Anderson':0.01, 'Pearl Oglethorpe':0.01, 'E4':0.01, 'Charlotte McDowd':0.01, 'E10':0.01, 'Olivia Carleton':0.01, 'Flora Price':0.01, 'E2':0.00, 'E1':0.00, 'Dorothy Murchison':0.00, 'E13':0.00, 'E14':0.00} for node, value in answer.items(): assert_almost_equal(value, bet[node], places=2) def test_davis_closeness_centrality(self): G = self.davis clos = bipartite.closeness_centrality(G, self.top_nodes) answer = {'E8':0.85, 'E9':0.79, 'E7':0.73, 'Nora Fayette':0.80, 'Evelyn Jefferson':0.80, 'Theresa Anderson':0.80, 'E6':0.69, 'Sylvia Avondale':0.77, 'Laura Mandeville':0.73, 'Brenda Rogers':0.73, 'Katherina Rogers':0.73, 'E5':0.59, 'Helen Lloyd':0.73, 'E3':0.56, 'Ruth DeSand':0.71, 'Verne Sanderson':0.71, 'E12':0.56, 'Myra Liddel':0.69, 'E11':0.54, 'Eleanor Nye':0.67, 'Frances Anderson':0.67, 'Pearl Oglethorpe':0.67, 'E4':0.54, 'Charlotte McDowd':0.60, 'E10':0.55, 'Olivia Carleton':0.59, 'Flora Price':0.59, 'E2':0.52, 'E1':0.52, 'Dorothy Murchison':0.65, 'E13':0.52, 'E14':0.52} for node, value in answer.items(): assert_almost_equal(value, clos[node], places=2) networkx-1.8.1/networkx/algorithms/bipartite/tests/test_cluster.py0000664000175000017500000000522512177456333025552 0ustar aricaric00000000000000import networkx as nx from nose.tools import * from networkx.algorithms.bipartite.cluster import cc_dot,cc_min,cc_max import networkx.algorithms.bipartite as bipartite def test_pairwise_bipartite_cc_functions(): # Test functions for different kinds of bipartite clustering coefficients # between pairs of nodes using 3 example graphs from figure 5 p. 40 # Latapy et al (2008) G1 = nx.Graph([(0,2),(0,3),(0,4),(0,5),(0,6),(1,5),(1,6),(1,7)]) G2 = nx.Graph([(0,2),(0,3),(0,4),(1,3),(1,4),(1,5)]) G3 = nx.Graph([(0,2),(0,3),(0,4),(0,5),(0,6),(1,5),(1,6),(1,7),(1,8),(1,9)]) result = {0:[1/3.0, 2/3.0, 2/5.0], 1:[1/2.0, 2/3.0, 2/3.0], 2:[2/8.0, 2/5.0, 2/5.0]} for i, G in enumerate([G1, G2, G3]): assert(bipartite.is_bipartite(G)) assert(cc_dot(set(G[0]), set(G[1])) == result[i][0]) assert(cc_min(set(G[0]), set(G[1])) == result[i][1]) assert(cc_max(set(G[0]), set(G[1])) == result[i][2]) def test_star_graph(): G=nx.star_graph(3) # all modes are the same answer={0:0,1:1,2:1,3:1} assert_equal(bipartite.clustering(G,mode='dot'),answer) assert_equal(bipartite.clustering(G,mode='min'),answer) assert_equal(bipartite.clustering(G,mode='max'),answer) @raises(nx.NetworkXError) def test_not_bipartite(): bipartite.clustering(nx.complete_graph(4)) @raises(nx.NetworkXError) def test_bad_mode(): bipartite.clustering(nx.path_graph(4),mode='foo') def test_path_graph(): G=nx.path_graph(4) answer={0:0.5,1:0.5,2:0.5,3:0.5} assert_equal(bipartite.clustering(G,mode='dot'),answer) assert_equal(bipartite.clustering(G,mode='max'),answer) answer={0:1,1:1,2:1,3:1} assert_equal(bipartite.clustering(G,mode='min'),answer) def test_average_path_graph(): G=nx.path_graph(4) assert_equal(bipartite.average_clustering(G,mode='dot'),0.5) assert_equal(bipartite.average_clustering(G,mode='max'),0.5) assert_equal(bipartite.average_clustering(G,mode='min'),1) def test_ra_clustering_davis(): G = nx.davis_southern_women_graph() cc4 = round(bipartite.robins_alexander_clustering(G), 3) assert_equal(cc4, 0.468) def test_ra_clustering_square(): G = nx.path_graph(4) G.add_edge(0, 3) assert_equal(bipartite.robins_alexander_clustering(G), 1.0) def test_ra_clustering_zero(): G = nx.Graph() assert_equal(bipartite.robins_alexander_clustering(G), 0) 
G.add_nodes_from(range(4)) assert_equal(bipartite.robins_alexander_clustering(G), 0) G.add_edges_from([(0,1),(2,3),(3,4)]) assert_equal(bipartite.robins_alexander_clustering(G), 0) G.add_edge(1,2) assert_equal(bipartite.robins_alexander_clustering(G), 0) networkx-1.8.1/networkx/algorithms/bipartite/tests/test_basic.py0000664000175000017500000000752712177456333025161 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest from nose.plugins.attrib import attr import networkx as nx from networkx.algorithms import bipartite class TestBipartiteBasic: def test_is_bipartite(self): assert_true(bipartite.is_bipartite(nx.path_graph(4))) assert_true(bipartite.is_bipartite(nx.DiGraph([(1,0)]))) assert_false(bipartite.is_bipartite(nx.complete_graph(3))) def test_bipartite_color(self): G=nx.path_graph(4) c=bipartite.color(G) assert_equal(c,{0: 1, 1: 0, 2: 1, 3: 0}) @raises(nx.NetworkXError) def test_not_bipartite_color(self): c=bipartite.color(nx.complete_graph(4)) def test_bipartite_directed(self): G = nx.bipartite_random_graph(10, 10, 0.1, directed=True) assert_true(bipartite.is_bipartite(G)) def test_bipartite_sets(self): G=nx.path_graph(4) X,Y=bipartite.sets(G) assert_equal(X,set([0,2])) assert_equal(Y,set([1,3])) def test_is_bipartite_node_set(self): G=nx.path_graph(4) assert_true(bipartite.is_bipartite_node_set(G,[0,2])) assert_true(bipartite.is_bipartite_node_set(G,[1,3])) assert_false(bipartite.is_bipartite_node_set(G,[1,2])) G.add_path([10,20]) assert_true(bipartite.is_bipartite_node_set(G,[0,2,10])) assert_true(bipartite.is_bipartite_node_set(G,[0,2,20])) assert_true(bipartite.is_bipartite_node_set(G,[1,3,10])) assert_true(bipartite.is_bipartite_node_set(G,[1,3,20])) def test_bipartite_density(self): G=nx.path_graph(5) X,Y=bipartite.sets(G) density=float(len(G.edges()))/(len(X)*len(Y)) assert_equal(bipartite.density(G,X),density) D = nx.DiGraph(G.edges()) assert_equal(bipartite.density(D,X),density/2.0) assert_equal(bipartite.density(nx.Graph(),{}),0.0) def test_bipartite_degrees(self): G=nx.path_graph(5) X=set([1,3]) Y=set([0,2,4]) u,d=bipartite.degrees(G,Y) assert_equal(u,{1:2,3:2}) assert_equal(d,{0:1,2:2,4:1}) def test_bipartite_weighted_degrees(self): G=nx.path_graph(5) G.add_edge(0,1,weight=0.1,other=0.2) X=set([1,3]) Y=set([0,2,4]) u,d=bipartite.degrees(G,Y,weight='weight') assert_equal(u,{1:1.1,3:2}) assert_equal(d,{0:0.1,2:2,4:1}) u,d=bipartite.degrees(G,Y,weight='other') assert_equal(u,{1:1.2,3:2}) assert_equal(d,{0:0.2,2:2,4:1}) @attr('numpy') def test_biadjacency_matrix_weight(self): try: import numpy except ImportError: raise SkipTest('numpy not available.') G=nx.path_graph(5) G.add_edge(0,1,weight=2,other=4) X=[1,3] Y=[0,2,4] M = bipartite.biadjacency_matrix(G,X,weight='weight') assert_equal(M[0,0], 2) M = bipartite.biadjacency_matrix(G, X, weight='other') assert_equal(M[0,0], 4) @attr('numpy') def test_biadjacency_matrix(self): try: import numpy except ImportError: raise SkipTest('numpy not available.') tops = [2,5,10] bots = [5,10,15] for i in range(len(tops)): G = nx.bipartite_random_graph(tops[i], bots[i], 0.2) top = [n for n,d in G.nodes(data=True) if d['bipartite']==0] M = bipartite.biadjacency_matrix(G, top) assert_equal(M.shape[0],tops[i]) assert_equal(M.shape[1],bots[i]) @attr('numpy') def test_biadjacency_matrix_order(self): try: import numpy except ImportError: raise SkipTest('numpy not available.') G=nx.path_graph(5) G.add_edge(0,1,weight=2) X=[3,1] Y=[4,2,0] M = bipartite.biadjacency_matrix(G,X,Y,weight='weight') 
assert_equal(M[1,2], 2) networkx-1.8.1/networkx/algorithms/bipartite/tests/test_project.py0000664000175000017500000003301012177456333025530 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx.algorithms import bipartite from networkx.testing import * class TestBipartiteProject: def test_path_projected_graph(self): G=nx.path_graph(4) P=bipartite.projected_graph(G,[1,3]) assert_equal(sorted(P.nodes()),[1,3]) assert_equal(sorted(P.edges()),[(1,3)]) P=bipartite.projected_graph(G,[0,2]) assert_equal(sorted(P.nodes()),[0,2]) assert_equal(sorted(P.edges()),[(0,2)]) def test_path_projected_properties_graph(self): G=nx.path_graph(4) G.add_node(1,name='one') G.add_node(2,name='two') P=bipartite.projected_graph(G,[1,3]) assert_equal(sorted(P.nodes()),[1,3]) assert_equal(sorted(P.edges()),[(1,3)]) assert_equal(P.node[1]['name'],G.node[1]['name']) P=bipartite.projected_graph(G,[0,2]) assert_equal(sorted(P.nodes()),[0,2]) assert_equal(sorted(P.edges()),[(0,2)]) assert_equal(P.node[2]['name'],G.node[2]['name']) def test_path_collaboration_projected_graph(self): G=nx.path_graph(4) P=bipartite.collaboration_weighted_projected_graph(G,[1,3]) assert_equal(sorted(P.nodes()),[1,3]) assert_equal(sorted(P.edges()),[(1,3)]) P[1][3]['weight']=1 P=bipartite.collaboration_weighted_projected_graph(G,[0,2]) assert_equal(sorted(P.nodes()),[0,2]) assert_equal(sorted(P.edges()),[(0,2)]) P[0][2]['weight']=1 def test_directed_path_collaboration_projected_graph(self): G=nx.DiGraph() G.add_path(list(range(4))) P=bipartite.collaboration_weighted_projected_graph(G,[1,3]) assert_equal(sorted(P.nodes()),[1,3]) assert_equal(sorted(P.edges()),[(1,3)]) P[1][3]['weight']=1 P=bipartite.collaboration_weighted_projected_graph(G,[0,2]) assert_equal(sorted(P.nodes()),[0,2]) assert_equal(sorted(P.edges()),[(0,2)]) P[0][2]['weight']=1 def test_path_weighted_projected_graph(self): G=nx.path_graph(4) P=bipartite.weighted_projected_graph(G,[1,3]) assert_equal(sorted(P.nodes()),[1,3]) assert_equal(sorted(P.edges()),[(1,3)]) P[1][3]['weight']=1 P=bipartite.weighted_projected_graph(G,[0,2]) assert_equal(sorted(P.nodes()),[0,2]) assert_equal(sorted(P.edges()),[(0,2)]) P[0][2]['weight']=1 def test_path_weighted_projected_directed_graph(self): G=nx.DiGraph() G.add_path(list(range(4))) P=bipartite.weighted_projected_graph(G,[1,3]) assert_equal(sorted(P.nodes()),[1,3]) assert_equal(sorted(P.edges()),[(1,3)]) P[1][3]['weight']=1 P=bipartite.weighted_projected_graph(G,[0,2]) assert_equal(sorted(P.nodes()),[0,2]) assert_equal(sorted(P.edges()),[(0,2)]) P[0][2]['weight']=1 def test_star_projected_graph(self): G=nx.star_graph(3) P=bipartite.projected_graph(G,[1,2,3]) assert_equal(sorted(P.nodes()),[1,2,3]) assert_equal(sorted(P.edges()),[(1,2),(1,3),(2,3)]) P=bipartite.weighted_projected_graph(G,[1,2,3]) assert_equal(sorted(P.nodes()),[1,2,3]) assert_equal(sorted(P.edges()),[(1,2),(1,3),(2,3)]) P=bipartite.projected_graph(G,[0]) assert_equal(sorted(P.nodes()),[0]) assert_equal(sorted(P.edges()),[]) def test_project_multigraph(self): G=nx.Graph() G.add_edge('a',1) G.add_edge('b',1) G.add_edge('a',2) G.add_edge('b',2) P=bipartite.projected_graph(G,'ab') assert_edges_equal(P.edges(),[('a','b')]) P=bipartite.weighted_projected_graph(G,'ab') assert_edges_equal(P.edges(),[('a','b')]) P=bipartite.projected_graph(G,'ab',multigraph=True) assert_edges_equal(P.edges(),[('a','b'),('a','b')]) def test_project_collaboration(self): G=nx.Graph() G.add_edge('a',1) G.add_edge('b',1) G.add_edge('b',2) 
G.add_edge('c',2) G.add_edge('c',3) G.add_edge('c',4) G.add_edge('b',4) P=bipartite.collaboration_weighted_projected_graph(G,'abc') assert_equal(P['a']['b']['weight'],1) assert_equal(P['b']['c']['weight'],2) def test_directed_projection(self): G=nx.DiGraph() G.add_edge('A',1) G.add_edge(1,'B') G.add_edge('A',2) G.add_edge('B',2) P=bipartite.projected_graph(G,'AB') assert_equal(sorted(P.edges()),[('A','B')]) P=bipartite.weighted_projected_graph(G,'AB') assert_equal(sorted(P.edges()),[('A','B')]) assert_equal(P['A']['B']['weight'],1) P=bipartite.projected_graph(G,'AB',multigraph=True) assert_equal(sorted(P.edges()),[('A','B')]) G=nx.DiGraph() G.add_edge('A',1) G.add_edge(1,'B') G.add_edge('A',2) G.add_edge(2,'B') P=bipartite.projected_graph(G,'AB') assert_equal(sorted(P.edges()),[('A','B')]) P=bipartite.weighted_projected_graph(G,'AB') assert_equal(sorted(P.edges()),[('A','B')]) assert_equal(P['A']['B']['weight'],2) P=bipartite.projected_graph(G,'AB',multigraph=True) assert_equal(sorted(P.edges()),[('A','B'),('A','B')]) class TestBipartiteWeightedProjection: def setUp(self): # Tore Opsahl's example # http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/ self.G=nx.Graph() self.G.add_edge('A',1) self.G.add_edge('A',2) self.G.add_edge('B',1) self.G.add_edge('B',2) self.G.add_edge('B',3) self.G.add_edge('B',4) self.G.add_edge('B',5) self.G.add_edge('C',1) self.G.add_edge('D',3) self.G.add_edge('E',4) self.G.add_edge('E',5) self.G.add_edge('E',6) self.G.add_edge('F',6) # Graph based on figure 6 from Newman (2001) self.N=nx.Graph() self.N.add_edge('A',1) self.N.add_edge('A',2) self.N.add_edge('A',3) self.N.add_edge('B',1) self.N.add_edge('B',2) self.N.add_edge('B',3) self.N.add_edge('C',1) self.N.add_edge('D',1) self.N.add_edge('E',3) def test_project_weighted_shared(self): edges=[('A','B',2), ('A','C',1), ('B','C',1), ('B','D',1), ('B','E',2), ('E','F',1)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.weighted_projected_graph(self.G,'ABCDEF') assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) edges=[('A','B',3), ('A','E',1), ('A','C',1), ('A','D',1), ('B','E',1), ('B','C',1), ('B','D',1), ('C','D',1)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.weighted_projected_graph(self.N,'ABCDE') assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) def test_project_weighted_newman(self): edges=[('A','B',1.5), ('A','C',0.5), ('B','C',0.5), ('B','D',1), ('B','E',2), ('E','F',1)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.collaboration_weighted_projected_graph(self.G,'ABCDEF') assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) edges=[('A','B',11/6.0), ('A','E',1/2.0), ('A','C',1/3.0), ('A','D',1/3.0), ('B','E',1/2.0), ('B','C',1/3.0), ('B','D',1/3.0), ('C','D',1/3.0)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.collaboration_weighted_projected_graph(self.N,'ABCDE') assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) def test_project_weighted_ratio(self): edges=[('A','B',2/6.0), ('A','C',1/6.0), ('B','C',1/6.0), ('B','D',1/6.0), ('B','E',2/6.0), ('E','F',1/6.0)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.weighted_projected_graph(self.G, 'ABCDEF', ratio=True) 
assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) edges=[('A','B',3/3.0), ('A','E',1/3.0), ('A','C',1/3.0), ('A','D',1/3.0), ('B','E',1/3.0), ('B','C',1/3.0), ('B','D',1/3.0), ('C','D',1/3.0)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.weighted_projected_graph(self.N, 'ABCDE', ratio=True) assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) def test_project_weighted_overlap(self): edges=[('A','B',2/2.0), ('A','C',1/1.0), ('B','C',1/1.0), ('B','D',1/1.0), ('B','E',2/3.0), ('E','F',1/1.0)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.overlap_weighted_projected_graph(self.G,'ABCDEF', jaccard=False) assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) edges=[('A','B',3/3.0), ('A','E',1/1.0), ('A','C',1/1.0), ('A','D',1/1.0), ('B','E',1/1.0), ('B','C',1/1.0), ('B','D',1/1.0), ('C','D',1/1.0)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.overlap_weighted_projected_graph(self.N,'ABCDE', jaccard=False) assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) def test_project_weighted_jaccard(self): edges=[('A','B',2/5.0), ('A','C',1/2.0), ('B','C',1/5.0), ('B','D',1/5.0), ('B','E',2/6.0), ('E','F',1/3.0)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.overlap_weighted_projected_graph(self.G,'ABCDEF') assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) edges=[('A','B',3/3.0), ('A','E',1/3.0), ('A','C',1/3.0), ('A','D',1/3.0), ('B','E',1/3.0), ('B','C',1/3.0), ('B','D',1/3.0), ('C','D',1/1.0)] Panswer=nx.Graph() Panswer.add_weighted_edges_from(edges) P=bipartite.overlap_weighted_projected_graph(self.N,'ABCDE') assert_equal(P.edges(),Panswer.edges()) for u,v in P.edges(): assert_equal(P[u][v]['weight'],Panswer[u][v]['weight']) def test_generic_weighted_projected_graph_simple(self): def shared(G, u, v): return len(set(G[u]) & set(G[v])) B = nx.path_graph(5) G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4], weight_function=shared) assert_equal(sorted(G.nodes()), [0, 2, 4]) assert_equal(G.edges(data=True), [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})] ) G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) assert_equal(sorted(G.nodes()), [0, 2, 4]) assert_equal(G.edges(data=True), [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})] ) B = nx.DiGraph() B.add_path(list(range(5))) G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4]) assert_equal(sorted(G.nodes()), [0, 2, 4]) assert_equal(G.edges(data=True), [(0, 2, {'weight': 1}), (2, 4, {'weight': 1})] ) def test_generic_weighted_projected_graph_custom(self): def jaccard(G, u, v): unbrs = set(G[u]) vnbrs = set(G[v]) return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs) def my_weight(G, u, v, weight='weight'): w = 0 for nbr in set(G[u]) & set(G[v]): w += G.edge[u][nbr].get(weight, 1) + G.edge[v][nbr].get(weight, 1) return w B = nx.complete_bipartite_graph(2,2) for i,(u,v) in enumerate(B.edges()): B.edge[u][v]['weight'] = i + 1 G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=jaccard) assert_equal(G.edges(data=True), [(0, 1, {'weight': 1.0})]) G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=my_weight) assert_equal(G.edges(data=True), [(0, 1, {'weight': 10})]) G = 
bipartite.generic_weighted_projected_graph(B, [0, 1]) assert_equal(G.edges(data=True), [(0, 1, {'weight': 2})]) networkx-1.8.1/networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py0000664000175000017500000000472312177456333030336 0ustar aricaric00000000000000# -*- coding: utf-8 -*- from nose import SkipTest from nose.tools import * import networkx as nx from networkx.algorithms.bipartite import spectral_bipartivity as sb # Examples from Figure 1 # E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of # bipartivity in complex networks", PhysRev E 72, 046105 (2005) class TestSpectralBipartivity(object): @classmethod def setupClass(cls): global scipy global assert_equal global assert_almost_equal try: import scipy.linalg except ImportError: raise SkipTest('SciPy not available.') def test_star_like(self): # star-like G=nx.star_graph(2) G.add_edge(1,2) assert_almost_equal(sb(G),0.843,places=3) G=nx.star_graph(3) G.add_edge(1,2) assert_almost_equal(sb(G),0.871,places=3) G=nx.star_graph(4) G.add_edge(1,2) assert_almost_equal(sb(G),0.890,places=3) def k23_like(self): # K2,3-like G=nx.complete_bipartite_graph(2,3) G.add_edge(0,1) assert_almost_equal(sb(G),0.769,places=3) G=nx.complete_bipartite_graph(2,3) G.add_edge(2,4) assert_almost_equal(sb(G),0.829,places=3) G=nx.complete_bipartite_graph(2,3) G.add_edge(2,4) G.add_edge(3,4) assert_almost_equal(sb(G),0.731,places=3) G=nx.complete_bipartite_graph(2,3) G.add_edge(0,1) G.add_edge(2,4) assert_almost_equal(sb(G),0.692,places=3) G=nx.complete_bipartite_graph(2,3) G.add_edge(2,4) G.add_edge(3,4) G.add_edge(0,1) assert_almost_equal(sb(G),0.645,places=3) G=nx.complete_bipartite_graph(2,3) G.add_edge(2,4) G.add_edge(3,4) G.add_edge(2,3) assert_almost_equal(sb(G),0.645,places=3) G=nx.complete_bipartite_graph(2,3) G.add_edge(2,4) G.add_edge(3,4) G.add_edge(2,3) G.add_edge(0,1) assert_almost_equal(sb(G),0.597,places=3) def test_single_nodes(self): # single nodes G=nx.complete_bipartite_graph(2,3) G.add_edge(2,4) sbn=sb(G,nodes=[1,2]) assert_almost_equal(sbn[1],0.85,places=2) assert_almost_equal(sbn[2],0.77,places=2) G=nx.complete_bipartite_graph(2,3) G.add_edge(0,1) sbn=sb(G,nodes=[1,2]) assert_almost_equal(sbn[1],0.73,places=2) assert_almost_equal(sbn[2],0.82,places=2) networkx-1.8.1/networkx/algorithms/bipartite/redundancy.py0000664000175000017500000000442112177456333024021 0ustar aricaric00000000000000#-*- coding: utf-8 -*- """Node redundancy for bipartite graphs.""" # Copyright (C) 2011 by # Jordi Torrents # Aric Hagberg # All rights reserved. # BSD license. from itertools import combinations import networkx as nx __author__ = """\n""".join(['Jordi Torrents ', 'Aric Hagberg (hagberg@lanl.gov)']) __all__ = ['node_redundancy'] def node_redundancy(G, nodes=None): r"""Compute bipartite node redundancy coefficient. The redundancy coefficient of a node `v` is the fraction of pairs of neighbors of `v` that are both linked to other nodes. In a one-mode projection these nodes would be linked together even if `v` were not there. .. math:: rc(v) = \frac{|\{\{u,w\} \subseteq N(v), \: \exists v' \neq v,\: (v',u) \in E\: \mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}} where `N(v)` are the neighbors of `v` in `G`. Parameters ---------- G : graph A bipartite graph nodes : list or iterable (optional) Compute redundancy for these nodes. The default is all nodes in G. Returns ------- redundancy : dictionary A dictionary keyed by node with the node redundancy value. 
Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.cycle_graph(4) >>> rc = bipartite.node_redundancy(G) >>> rc[0] 1.0 Compute the average redundancy for the graph: >>> sum(rc.values())/len(G) 1.0 Compute the average redundancy for a set of nodes: >>> nodes = [0, 2] >>> sum(rc[n] for n in nodes)/len(nodes) 1.0 References ---------- .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008). Basic notions for the analysis of large two-mode networks. Social Networks 30(1), 31--48. """ if nodes is None: nodes = G rc = {} for v in nodes: overlap = 0.0 for u, w in combinations(G[v], 2): if len((set(G[u]) & set(G[w])) - set([v])) > 0: overlap += 1 if overlap > 0: n = len(G[v]) norm = 2.0/(n*(n-1)) else: norm = 1.0 rc[v] = overlap*norm return rc networkx-1.8.1/networkx/algorithms/bipartite/basic.py0000664000175000017500000002131612177456333022750 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ========================== Bipartite Graph Algorithms ========================== """ # Copyright (C) 2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from itertools import count __author__ = """\n""".join(['Jordi Torrents ', 'Aric Hagberg ']) __all__ = [ 'is_bipartite', 'is_bipartite_node_set', 'color', 'sets', 'density', 'degrees', 'biadjacency_matrix'] def biadjacency_matrix(G, row_order, column_order=None, weight='weight', dtype=None): r"""Return the biadjacency matrix of the bipartite graph G. Let `G = (U, V, E)` be a bipartite graph with node sets `U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. The biadjacency matrix [1] is the `r` x `s` matrix `B` in which `b_{i,j} = 1` if, and only if, `(u_i, v_j) \in E`. If the parameter `weight` is not `None` and matches the name of an edge attribute, its value is used instead of 1. Parameters ---------- G : graph A NetworkX graph row_order : list of nodes The rows of the matrix are ordered according to the list of nodes. column_order : list, optional The columns of the matrix are ordered according to the list of nodes. If column_order is None, then the ordering of columns is arbitrary. weight : string or None, optional (default='weight') The edge data key used to provide each value in the matrix. If None, then each edge has weight 1. dtype : NumPy data type, optional A valid single NumPy data type used to initialize the array. This must be a simple type such as int or numpy.float64 and not a compound data type (see to_numpy_recarray) If None, then the NumPy default is used. Returns ------- B : numpy matrix Biadjacency matrix representation of the bipartite graph G. Notes ----- No attempt is made to check that the input graph is bipartite. For directed bipartite graphs only successors are considered as neighbors. To obtain an adjacency matrix with ones (or weight values) for both predecessors and successors you have to generate two biadjacency matrices where the rows of one of them are the columns of the other, and then add one to the transpose of the other. 
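A possible sketch of that recipe, assuming ``G`` is a directed bipartite graph whose two node sets are given by ``row_order`` and ``column_order`` (illustrative only, not a doctest)::

    B1 = biadjacency_matrix(G, row_order, column_order)    # ones (or weights) for successors
    B2 = biadjacency_matrix(G, column_order, row_order)    # same, with rows and columns swapped
    M  = B1 + B2.T                                         # predecessors and successors combined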
See Also -------- to_numpy_matrix adjacency_matrix References ---------- [1] http://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph """ try: import numpy as np except ImportError: raise ImportError('adjacency_matrix() requires numpy ', 'http://scipy.org/') if column_order is None: column_order = list(set(G) - set(row_order)) row = dict(zip(row_order,count())) col = dict(zip(column_order,count())) M = np.zeros((len(row),len(col)), dtype=dtype) for u in row_order: for v, d in G[u].items(): M[row[u],col[v]] = d.get(weight, 1) return np.asmatrix(M) def color(G): """Returns a two-coloring of the graph. Raises an exception if the graph is not bipartite. Parameters ---------- G : NetworkX graph Returns ------- color : dictionary A dictionary keyed by node with a 1 or 0 as data for each node color. Raises ------ NetworkXError if the graph is not two-colorable. Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) >>> c = bipartite.color(G) >>> print(c) {0: 1, 1: 0, 2: 1, 3: 0} You can use this to set a node attribute indicating the biparite set: >>> nx.set_node_attributes(G, 'bipartite', c) >>> print(G.node[0]['bipartite']) 1 >>> print(G.node[1]['bipartite']) 0 """ if G.is_directed(): import itertools def neighbors(v): return itertools.chain.from_iterable([G.predecessors_iter(v), G.successors_iter(v)]) else: neighbors=G.neighbors_iter color = {} for n in G: # handle disconnected graphs if n in color or len(G[n])==0: # skip isolates continue queue = [n] color[n] = 1 # nodes seen with color (1 or 0) while queue: v = queue.pop() c = 1 - color[v] # opposite color of node v for w in neighbors(v): if w in color: if color[w] == color[v]: raise nx.NetworkXError("Graph is not bipartite.") else: color[w] = c queue.append(w) # color isolates with 0 color.update(dict.fromkeys(nx.isolates(G),0)) return color def is_bipartite(G): """ Returns True if graph G is bipartite, False if not. Parameters ---------- G : NetworkX graph Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) >>> print(bipartite.is_bipartite(G)) True See Also -------- color, is_bipartite_node_set """ try: color(G) return True except nx.NetworkXError: return False def is_bipartite_node_set(G,nodes): """Returns True if nodes and G/nodes are a bipartition of G. Parameters ---------- G : NetworkX graph nodes: list or container Check if nodes are a one of a bipartite set. Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) >>> X = set([1,3]) >>> bipartite.is_bipartite_node_set(G,X) True Notes ----- For connected graphs the bipartite sets are unique. This function handles disconnected graphs. """ S=set(nodes) for CC in nx.connected_component_subgraphs(G): X,Y=sets(CC) if not ( (X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S)) ): return False return True def sets(G): """Returns bipartite node sets of graph G. Raises an exception if the graph is not bipartite. Parameters ---------- G : NetworkX graph Returns ------- (X,Y) : two-tuple of sets One set of nodes for each part of the bipartite graph. Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) >>> X, Y = bipartite.sets(G) >>> list(X) [0, 2] >>> list(Y) [1, 3] See Also -------- color """ c = color(G) X = set(n for n in c if c[n]) # c[n] == 1 Y = set(n for n in c if not c[n]) # c[n] == 0 return (X, Y) def density(B, nodes): """Return density of bipartite graph B. 
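The bipartite density is the ratio of the number of edges ``m`` to the maximum possible number of edges between the two node sets: ``m / (n_b * n_t)`` for undirected graphs and ``m / (2 * n_b * n_t)`` for directed graphs (which may have edges in both directions), where ``n_b = len(nodes)`` and ``n_t = len(B) - n_b``. A graph with no edges has density 0.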
Parameters ---------- G : NetworkX graph nodes: list or container Nodes in one set of the bipartite graph. Returns ------- d : float The bipartite density Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.complete_bipartite_graph(3,2) >>> X=set([0,1,2]) >>> bipartite.density(G,X) 1.0 >>> Y=set([3,4]) >>> bipartite.density(G,Y) 1.0 See Also -------- color """ n=len(B) m=nx.number_of_edges(B) nb=len(nodes) nt=n-nb if m==0: # includes cases n==0 and n==1 d=0.0 else: if B.is_directed(): d=m/(2.0*float(nb*nt)) else: d= m/float(nb*nt) return d def degrees(B, nodes, weight=None): """Return the degrees of the two node sets in the bipartite graph B. Parameters ---------- G : NetworkX graph nodes: list or container Nodes in one set of the bipartite graph. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. Returns ------- (degX,degY) : tuple of dictionaries The degrees of the two bipartite sets as dictionaries keyed by node. Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.complete_bipartite_graph(3,2) >>> Y=set([3,4]) >>> degX,degY=bipartite.degrees(G,Y) >>> degX {0: 2, 1: 2, 2: 2} See Also -------- color, density """ bottom=set(nodes) top=set(B)-bottom return (B.degree(top,weight),B.degree(bottom,weight)) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/algorithms/bipartite/projection.py0000664000175000017500000003757512177456333024061 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """One-mode (unipartite) projections of bipartite graphs. """ import networkx as nx # Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = """\n""".join(['Aric Hagberg ', 'Jordi Torrents ']) __all__ = ['project', 'projected_graph', 'weighted_projected_graph', 'collaboration_weighted_projected_graph', 'overlap_weighted_projected_graph', 'generic_weighted_projected_graph'] def projected_graph(B, nodes, multigraph=False): r"""Returns the projection of B onto one of its node sets. Returns the graph G that is the projection of the bipartite graph B onto the specified nodes. They retain their attributes and are connected in G if they have a common neighbor in B. Parameters ---------- B : NetworkX graph The input graph should be bipartite. nodes : list or iterable Nodes to project onto (the "bottom" nodes). multigraph: bool (default=False) If True return a multigraph where the multiple edges represent multiple shared neighbors. They edge key in the multigraph is assigned to the label of the neighbor. Returns ------- Graph : NetworkX graph or multigraph A graph that is the projection onto the given nodes. Examples -------- >>> from networkx.algorithms import bipartite >>> B = nx.path_graph(4) >>> G = bipartite.projected_graph(B, [1,3]) >>> print(G.nodes()) [1, 3] >>> print(G.edges()) [(1, 3)] If nodes `a`, and `b` are connected through both nodes 1 and 2 then building a multigraph results in two edges in the projection onto [`a`,`b`]: >>> B = nx.Graph() >>> B.add_edges_from([('a', 1), ('b', 1), ('a', 2), ('b', 2)]) >>> G = bipartite.projected_graph(B, ['a', 'b'], multigraph=True) >>> print([sorted((u,v)) for u,v in G.edges()]) [['a', 'b'], ['a', 'b']] Notes ------ No attempt is made to verify that the input graph B is bipartite. 
Returns a simple graph that is the projection of the bipartite graph B onto the set of nodes given in list nodes. If multigraph=True then a multigraph is returned with an edge for every shared neighbor. Directed graphs are allowed as input. The output will also then be a directed graph with edges if there is a directed path between the nodes. The graph and node properties are (shallow) copied to the projected graph. See Also -------- is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph, collaboration_weighted_projected_graph, overlap_weighted_projected_graph, generic_weighted_projected_graph """ if B.is_multigraph(): raise nx.NetworkXError("not defined for multigraphs") if B.is_directed(): directed=True if multigraph: G=nx.MultiDiGraph() else: G=nx.DiGraph() else: directed=False if multigraph: G=nx.MultiGraph() else: G=nx.Graph() G.graph.update(B.graph) G.add_nodes_from((n,B.node[n]) for n in nodes) for u in nodes: nbrs2=set((v for nbr in B[u] for v in B[nbr])) -set([u]) if multigraph: for n in nbrs2: if directed: links=set(B[u]) & set(B.pred[n]) else: links=set(B[u]) & set(B[n]) for l in links: if not G.has_edge(u,n,l): G.add_edge(u,n,key=l) else: G.add_edges_from((u,n) for n in nbrs2) return G def weighted_projected_graph(B, nodes, ratio=False): r"""Returns a weighted projection of B onto one of its node sets. The weighted projected graph is the projection of the bipartite network B onto the specified nodes with weights representing the number of shared neighbors or the ratio between actual shared neighbors and possible shared neighbors if ratio=True [1]_. The nodes retain their attributes and are connected in the resulting graph if they have an edge to a common node in the original graph. Parameters ---------- B : NetworkX graph The input graph should be bipartite. nodes : list or iterable Nodes to project onto (the "bottom" nodes). ratio: Bool (default=False) If True, edge weight is the ratio between actual shared neighbors and possible shared neighbors. If False, edges weight is the number of shared neighbors. Returns ------- Graph : NetworkX graph A graph that is the projection onto the given nodes. Examples -------- >>> from networkx.algorithms import bipartite >>> B = nx.path_graph(4) >>> G = bipartite.weighted_projected_graph(B, [1,3]) >>> print(G.nodes()) [1, 3] >>> print(G.edges(data=True)) [(1, 3, {'weight': 1})] >>> G = bipartite.weighted_projected_graph(B, [1,3], ratio=True) >>> print(G.edges(data=True)) [(1, 3, {'weight': 0.5})] Notes ------ No attempt is made to verify that the input graph B is bipartite. The graph and node properties are (shallow) copied to the projected graph. See Also -------- is_bipartite, is_bipartite_node_set, sets, collaboration_weighted_projected_graph, overlap_weighted_projected_graph, generic_weighted_projected_graph projected_graph References ---------- .. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook of Social Network Analysis. Sage Publications. 
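With ``ratio=True`` the weight assigned to the edge between `u` and `v` is

.. math::

    w_{u,v} = \frac{|N(u) \cap N(v)|}{n_{top}}

where `n_{top}` is the size of the opposite ("top") node set, ``len(B) - len(nodes)``; with ``ratio=False`` the weight is simply the number of shared neighbors `|N(u) \cap N(v)|`.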
""" if B.is_multigraph(): raise nx.NetworkXError("not defined for multigraphs") if B.is_directed(): pred=B.pred G=nx.DiGraph() else: pred=B.adj G=nx.Graph() G.graph.update(B.graph) G.add_nodes_from((n,B.node[n]) for n in nodes) n_top = float(len(B) - len(nodes)) for u in nodes: unbrs = set(B[u]) nbrs2 = set((n for nbr in unbrs for n in B[nbr])) - set([u]) for v in nbrs2: vnbrs = set(pred[v]) common = unbrs & vnbrs if not ratio: weight = len(common) else: weight = len(common) / n_top G.add_edge(u,v,weight=weight) return G def collaboration_weighted_projected_graph(B, nodes): r"""Newman's weighted projection of B onto one of its node sets. The collaboration weighted projection is the projection of the bipartite network B onto the specified nodes with weights assigned using Newman's collaboration model [1]_: .. math:: w_{v,u} = \sum_k \frac{\delta_{v}^{w} \delta_{w}^{k}}{k_w - 1} where `v` and `u` are nodes from the same bipartite node set, and `w` is a node of the opposite node set. The value `k_w` is the degree of node `w` in the bipartite network and `\delta_{v}^{w}` is 1 if node `v` is linked to node `w` in the original bipartite graph or 0 otherwise. The nodes retain their attributes and are connected in the resulting graph if have an edge to a common node in the original bipartite graph. Parameters ---------- B : NetworkX graph The input graph should be bipartite. nodes : list or iterable Nodes to project onto (the "bottom" nodes). Returns ------- Graph : NetworkX graph A graph that is the projection onto the given nodes. Examples -------- >>> from networkx.algorithms import bipartite >>> B = nx.path_graph(5) >>> B.add_edge(1,5) >>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5]) >>> print(G.nodes()) [0, 2, 4, 5] >>> for edge in G.edges(data=True): print(edge) ... (0, 2, {'weight': 0.5}) (0, 5, {'weight': 0.5}) (2, 4, {'weight': 1.0}) (2, 5, {'weight': 0.5}) Notes ------ No attempt is made to verify that the input graph B is bipartite. The graph and node properties are (shallow) copied to the projected graph. See Also -------- is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph, overlap_weighted_projected_graph, generic_weighted_projected_graph, projected_graph References ---------- .. [1] Scientific collaboration networks: II. Shortest paths, weighted networks, and centrality, M. E. J. Newman, Phys. Rev. E 64, 016132 (2001). """ if B.is_multigraph(): raise nx.NetworkXError("not defined for multigraphs") if B.is_directed(): pred=B.pred G=nx.DiGraph() else: pred=B.adj G=nx.Graph() G.graph.update(B.graph) G.add_nodes_from((n,B.node[n]) for n in nodes) for u in nodes: unbrs = set(B[u]) nbrs2 = set((n for nbr in unbrs for n in B[nbr])) - set([u]) for v in nbrs2: vnbrs = set(pred[v]) common = unbrs & vnbrs weight = sum([1.0/(len(B[n]) - 1) for n in common if len(B[n])>1]) G.add_edge(u,v,weight=weight) return G def overlap_weighted_projected_graph(B, nodes, jaccard=True): r"""Overlap weighted projection of B onto one of its node sets. The overlap weighted projection is the projection of the bipartite network B onto the specified nodes with weights representing the Jaccard index between the neighborhoods of the two nodes in the original bipartite network [1]_: .. math:: w_{v,u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|} or if the parameter 'jaccard' is False, the fraction of common neighbors by minimum of both nodes degree in the original bipartite graph [1]_: .. 
math:: w_{v,u} = \frac{|N(u) \cap N(v)|}{min(|N(u)|,|N(v)|)} The nodes retain their attributes and are connected in the resulting graph if have an edge to a common node in the original bipartite graph. Parameters ---------- B : NetworkX graph The input graph should be bipartite. nodes : list or iterable Nodes to project onto (the "bottom" nodes). jaccard: Bool (default=True) Returns ------- Graph : NetworkX graph A graph that is the projection onto the given nodes. Examples -------- >>> from networkx.algorithms import bipartite >>> B = nx.path_graph(5) >>> G = bipartite.overlap_weighted_projected_graph(B, [0, 2, 4]) >>> print(G.nodes()) [0, 2, 4] >>> print(G.edges(data=True)) [(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})] >>> G = bipartite.overlap_weighted_projected_graph(B, [0, 2, 4], jaccard=False) >>> print(G.edges(data=True)) [(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})] Notes ------ No attempt is made to verify that the input graph B is bipartite. The graph and node properties are (shallow) copied to the projected graph. See Also -------- is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph, collaboration_weighted_projected_graph, generic_weighted_projected_graph, projected_graph References ---------- .. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook of Social Network Analysis. Sage Publications. """ if B.is_multigraph(): raise nx.NetworkXError("not defined for multigraphs") if B.is_directed(): pred=B.pred G=nx.DiGraph() else: pred=B.adj G=nx.Graph() G.graph.update(B.graph) G.add_nodes_from((n,B.node[n]) for n in nodes) for u in nodes: unbrs = set(B[u]) nbrs2 = set((n for nbr in unbrs for n in B[nbr])) - set([u]) for v in nbrs2: vnbrs = set(pred[v]) if jaccard: weight = float(len(unbrs & vnbrs)) / len(unbrs | vnbrs) else: weight = float(len(unbrs & vnbrs)) / min(len(unbrs),len(vnbrs)) G.add_edge(u,v,weight=weight) return G def generic_weighted_projected_graph(B, nodes, weight_function=None): r"""Weighted projection of B with a user-specified weight function. The bipartite network B is projected on to the specified nodes with weights computed by a user-specified function. This function must accept as a parameter the neighborhood sets of two nodes and return an integer or a float. The nodes retain their attributes and are connected in the resulting graph if they have an edge to a common node in the original graph. Parameters ---------- B : NetworkX graph The input graph should be bipartite. nodes : list or iterable Nodes to project onto (the "bottom" nodes). weight_function: function This function must accept as parameters the same input graph that this function, and two nodes; and return an integer or a float. The default function computes the number of shared neighbors. Returns ------- Graph : NetworkX graph A graph that is the projection onto the given nodes. Examples -------- >>> from networkx.algorithms import bipartite >>> # Define some custom weight functions >>> def jaccard(G, u, v): ... unbrs = set(G[u]) ... vnbrs = set(G[v]) ... return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs) ... >>> def my_weight(G, u, v, weight='weight'): ... w = 0 ... for nbr in set(G[u]) & set(G[v]): ... w += G.edge[u][nbr].get(weight, 1) + G.edge[v][nbr].get(weight, 1) ... return w ... >>> # A complete bipartite graph with 4 nodes and 4 edges >>> B = nx.complete_bipartite_graph(2,2) >>> # Add some arbitrary weight to the edges >>> for i,(u,v) in enumerate(B.edges()): ... 
B.edge[u][v]['weight'] = i + 1 ... >>> for edge in B.edges(data=True): ... print(edge) ... (0, 2, {'weight': 1}) (0, 3, {'weight': 2}) (1, 2, {'weight': 3}) (1, 3, {'weight': 4}) >>> # Without specifying a function, the weight is equal to # shared partners >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1]) >>> print(G.edges(data=True)) [(0, 1, {'weight': 2})] >>> # To specify a custom weight function use the weight_function parameter >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=jaccard) >>> print(G.edges(data=True)) [(0, 1, {'weight': 1.0})] >>> G = bipartite.generic_weighted_projected_graph(B, [0, 1], weight_function=my_weight) >>> print(G.edges(data=True)) [(0, 1, {'weight': 10})] Notes ------ No attempt is made to verify that the input graph B is bipartite. The graph and node properties are (shallow) copied to the projected graph. See Also -------- is_bipartite, is_bipartite_node_set, sets, weighted_projected_graph, collaboration_weighted_projected_graph, overlap_weighted_projected_graph, projected_graph """ if B.is_multigraph(): raise nx.NetworkXError("not defined for multigraphs") if B.is_directed(): pred=B.pred G=nx.DiGraph() else: pred=B.adj G=nx.Graph() if weight_function is None: def weight_function(G, u, v): # Notice that we use set(pred[v]) for handling the directed case. return len(set(G[u]) & set(pred[v])) G.graph.update(B.graph) G.add_nodes_from((n,B.node[n]) for n in nodes) for u in nodes: nbrs2 = set((n for nbr in set(B[u]) for n in B[nbr])) - set([u]) for v in nbrs2: weight = weight_function(B, u, v) G.add_edge(u,v,weight=weight) return G def project(B, nodes, create_using=None): return projected_graph(B, nodes) networkx-1.8.1/networkx/algorithms/bipartite/cluster.py0000664000175000017500000001560212177456333023351 0ustar aricaric00000000000000#-*- coding: utf-8 -*- # Copyright (C) 2011 by # Jordi Torrents # Aric Hagberg # All rights reserved. # BSD license. import itertools import networkx as nx __author__ = """\n""".join(['Jordi Torrents ', 'Aric Hagberg (hagberg@lanl.gov)']) __all__ = [ 'clustering', 'average_clustering', 'latapy_clustering', 'robins_alexander_clustering'] # functions for computing clustering of pairs def cc_dot(nu,nv): return float(len(nu & nv))/len(nu | nv) def cc_max(nu,nv): return float(len(nu & nv))/max(len(nu),len(nv)) def cc_min(nu,nv): return float(len(nu & nv))/min(len(nu),len(nv)) modes={'dot':cc_dot, 'min':cc_min, 'max':cc_max} def latapy_clustering(G, nodes=None, mode='dot'): r"""Compute a bipartite clustering coefficient for nodes. The bipartie clustering coefficient is a measure of local density of connections defined as [1]_: .. math:: c_u = \frac{\sum_{v \in N(N(v))} c_{uv} }{|N(N(u))|} where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`, and `c_{uv}` is the pairwise clustering coefficient between nodes `u` and `v`. The mode selects the function for `c_{uv}` which can be: `dot`: .. math:: c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|} `min`: .. math:: c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)} `max`: .. math:: c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)} Parameters ---------- G : graph A bipartite graph nodes : list or iterable (optional) Compute bipartite clustering for these nodes. The default is all nodes in G. mode : string The pariwise bipartite clustering method to be used in the computation. It must be "dot", "max", or "min". Returns ------- clustering : dictionary A dictionary keyed by node with the clustering coefficient value. 
Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.path_graph(4) # path graphs are bipartite >>> c = bipartite.clustering(G) >>> c[0] 0.5 >>> c = bipartite.clustering(G,mode='min') >>> c[0] 1.0 See Also -------- robins_alexander_clustering square_clustering average_clustering References ---------- .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008). Basic notions for the analysis of large two-mode networks. Social Networks 30(1), 31--48. """ if not nx.algorithms.bipartite.is_bipartite(G): raise nx.NetworkXError("Graph is not bipartite") try: cc_func = modes[mode] except KeyError: raise nx.NetworkXError(\ "Mode for bipartite clustering must be: dot, min or max") if nodes is None: nodes = G ccs = {} for v in nodes: cc = 0.0 nbrs2=set([u for nbr in G[v] for u in G[nbr]])-set([v]) for u in nbrs2: cc += cc_func(set(G[u]),set(G[v])) if cc > 0.0: # len(nbrs2)>0 cc /= len(nbrs2) ccs[v] = cc return ccs clustering = latapy_clustering def average_clustering(G, nodes=None, mode='dot'): r"""Compute the average bipartite clustering coefficient. A clustering coefficient for the whole graph is the average, .. math:: C = \frac{1}{n}\sum_{v \in G} c_v, where `n` is the number of nodes in `G`. Similar measures for the two bipartite sets can be defined [1]_ .. math:: C_X = \frac{1}{|X|}\sum_{v \in X} c_v, where `X` is a bipartite set of `G`. Parameters ---------- G : graph a bipartite graph nodes : list or iterable, optional A container of nodes to use in computing the average. The nodes should be either the entire graph (the default) or one of the bipartite sets. mode : string The pariwise bipartite clustering method. It must be "dot", "max", or "min" Returns ------- clustering : float The average bipartite clustering for the given set of nodes or the entire graph if no nodes are specified. Examples -------- >>> from networkx.algorithms import bipartite >>> G=nx.star_graph(3) # star graphs are bipartite >>> bipartite.average_clustering(G) 0.75 >>> X,Y=bipartite.sets(G) >>> bipartite.average_clustering(G,X) 0.0 >>> bipartite.average_clustering(G,Y) 1.0 See Also -------- clustering Notes ----- The container of nodes passed to this function must contain all of the nodes in one of the bipartite sets ("top" or "bottom") in order to compute the correct average bipartite clustering coefficients. References ---------- .. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008). Basic notions for the analysis of large two-mode networks. Social Networks 30(1), 31--48. """ if nodes is None: nodes=G ccs=latapy_clustering(G, nodes=nodes, mode=mode) return float(sum(ccs[v] for v in nodes))/len(nodes) def robins_alexander_clustering(G): r"""Compute the bipartite clustering of G. Robins and Alexander [1]_ defined bipartite clustering coefficient as four times the number of four cycles `C_4` divided by the number of three paths `L_3` in a bipartite graph: .. math:: CC_4 = \frac{4 * C_4}{L_3} Parameters ---------- G : graph a bipartite graph Returns ------- clustering : float The Robins and Alexander bipartite clustering for the input graph. Examples -------- >>> from networkx.algorithms import bipartite >>> G = nx.davis_southern_women_graph() >>> print(round(bipartite.robins_alexander_clustering(G), 3)) 0.468 See Also -------- latapy_clustering square_clustering References ---------- .. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking directors: Network structure and distance in bipartite graphs. 
Computational & Mathematical Organization Theory 10(1), 69–94. """ if G.order() < 4 or G.size() < 3: return 0 L_3 = _threepaths(G) if L_3 == 0: return 0 C_4 = _four_cycles(G) return (4. * C_4) / L_3 def _four_cycles(G): cycles = 0 for v in G: for u, w in itertools.combinations(G[v], 2): cycles += len((set(G[u]) & set(G[w])) - set([v])) return cycles / 4 def _threepaths(G): paths = 0 for v in G: for u in G[v]: for w in set(G[u]) - set([v]): paths += len(set(G[w]) - set([v, u])) # Divide by two because we count each three path twice # one for each possible starting point return paths / 2 networkx-1.8.1/networkx/algorithms/distance_measures.py0000664000175000017500000000744412177456333023410 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Graph diameter, radius, eccentricity and other properties. """ __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['eccentricity', 'diameter', 'radius', 'periphery', 'center'] import networkx def eccentricity(G, v=None, sp=None): """Return the eccentricity of nodes in G. The eccentricity of a node v is the maximum distance from v to all other nodes in G. Parameters ---------- G : NetworkX graph A graph v : node, optional Return value of specified node sp : dict of dicts, optional All pairs shortest path lengths as a dictionary of dictionaries Returns ------- ecc : dictionary A dictionary of eccentricity values keyed by node. """ # nodes= # nodes=[] # if v is None: # none, use entire graph # nodes=G.nodes() # elif v in G: # is v a single node # nodes=[v] # else: # assume v is a container of nodes # nodes=v order=G.order() e={} for n in G.nbunch_iter(v): if sp is None: length=networkx.single_source_shortest_path_length(G,n) L = len(length) else: try: length=sp[n] L = len(length) except TypeError: raise networkx.NetworkXError('Format of "sp" is invalid.') if L != order: msg = "Graph not connected: infinite path length" raise networkx.NetworkXError(msg) e[n]=max(length.values()) if v in G: return e[v] # return single value else: return e def diameter(G, e=None): """Return the diameter of the graph G. The diameter is the maximum eccentricity. Parameters ---------- G : NetworkX graph A graph e : eccentricity dictionary, optional A precomputed dictionary of eccentricities. Returns ------- d : integer Diameter of graph See Also -------- eccentricity """ if e is None: e=eccentricity(G) return max(e.values()) def periphery(G, e=None): """Return the periphery of the graph G. The periphery is the set of nodes with eccentricity equal to the diameter. Parameters ---------- G : NetworkX graph A graph e : eccentricity dictionary, optional A precomputed dictionary of eccentricities. Returns ------- p : list List of nodes in periphery """ if e is None: e=eccentricity(G) diameter=max(e.values()) p=[v for v in e if e[v]==diameter] return p def radius(G, e=None): """Return the radius of the graph G. The radius is the minimum eccentricity. Parameters ---------- G : NetworkX graph A graph e : eccentricity dictionary, optional A precomputed dictionary of eccentricities. Returns ------- r : integer Radius of graph """ if e is None: e=eccentricity(G) return min(e.values()) def center(G, e=None): """Return the center of the graph G. The center is the set of nodes with eccentricity equal to radius. 
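For instance, in a path graph on four nodes only the two middle nodes attain the minimum eccentricity (an illustrative sketch, not a doctest)::

    G = networkx.path_graph(4)   # 0 - 1 - 2 - 3
    center(G)                    # [1, 2]; the radius is 2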
Parameters ---------- G : NetworkX graph A graph e : eccentricity dictionary, optional A precomputed dictionary of eccentricities. Returns ------- c : list List of nodes in center """ if e is None: e=eccentricity(G) # order the nodes by path length radius=min(e.values()) p=[v for v in e if e[v]==radius] return p networkx-1.8.1/networkx/algorithms/dag.py0000664000175000017500000001700612177456333020440 0ustar aricaric00000000000000# -*- coding: utf-8 -*- from fractions import gcd import networkx as nx """Algorithms for directed acyclic graphs (DAGs).""" # Copyright (C) 2006-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = """\n""".join(['Aric Hagberg ', 'Dan Schult (dschult@colgate.edu)', 'Ben Edwards (bedwards@cs.unm.edu)']) __all__ = ['descendants', 'ancestors', 'topological_sort', 'topological_sort_recursive', 'is_directed_acyclic_graph', 'is_aperiodic'] def descendants(G, source): """Return all nodes reachable from `source` in G. Parameters ---------- G : NetworkX DiGraph source : node in G Returns ------- des : set() The descendants of source in G """ if not G.has_node(source): raise nx.NetworkXError("The node %s is not in the graph." % source) des = set(nx.shortest_path_length(G, source=source).keys()) - set([source]) return des def ancestors(G, source): """Return all nodes having a path to `source` in G. Parameters ---------- G : NetworkX DiGraph source : node in G Returns ------- ancestors : set() The ancestors of source in G """ if not G.has_node(source): raise nx.NetworkXError("The node %s is not in the graph." % source) anc = set(nx.shortest_path_length(G, target=source).keys()) - set([source]) return anc def is_directed_acyclic_graph(G): """Return True if the graph G is a directed acyclic graph (DAG) or False if not. Parameters ---------- G : NetworkX graph A graph Returns ------- is_dag : bool True if G is a DAG, false otherwise """ if not G.is_directed(): return False try: topological_sort(G) return True except nx.NetworkXUnfeasible: return False def topological_sort(G,nbunch=None): """Return a list of nodes in topological sort order. A topological sort is a nonunique permutation of the nodes such that an edge from u to v implies that u appears before v in the topological sort order. Parameters ---------- G : NetworkX digraph A directed graph nbunch : container of nodes (optional) Explore graph in specified order given in nbunch Raises ------ NetworkXError Topological sort is defined for directed graphs only. If the graph G is undirected, a NetworkXError is raised. NetworkXUnfeasible If G is not a directed acyclic graph (DAG) no topological sort exists and a NetworkXUnfeasible exception is raised. Notes ----- This algorithm is based on a description and proof in The Algorithm Design Manual [1]_ . See also -------- is_directed_acyclic_graph References ---------- .. [1] Skiena, S. S. The Algorithm Design Manual (Springer-Verlag, 1998). 
http://www.amazon.com/exec/obidos/ASIN/0387948600/ref=ase_thealgorithmrepo/ """ if not G.is_directed(): raise nx.NetworkXError( "Topological sort not defined on undirected graphs.") # nonrecursive version seen = set() order = [] explored = set() if nbunch is None: nbunch = G.nodes_iter() for v in nbunch: # process all vertices in G if v in explored: continue fringe = [v] # nodes yet to look at while fringe: w = fringe[-1] # depth first search if w in explored: # already looked down this branch fringe.pop() continue seen.add(w) # mark as seen # Check successors for cycles and for new nodes new_nodes = [] for n in G[w]: if n not in explored: if n in seen: #CYCLE !! raise nx.NetworkXUnfeasible("Graph contains a cycle.") new_nodes.append(n) if new_nodes: # Add new_nodes to fringe fringe.extend(new_nodes) else: # No new nodes so w is fully explored explored.add(w) order.append(w) fringe.pop() # done considering this node return list(reversed(order)) def topological_sort_recursive(G,nbunch=None): """Return a list of nodes in topological sort order. A topological sort is a nonunique permutation of the nodes such that an edge from u to v implies that u appears before v in the topological sort order. Parameters ---------- G : NetworkX digraph nbunch : container of nodes (optional) Explore graph in specified order given in nbunch Raises ------ NetworkXError Topological sort is defined for directed graphs only. If the graph G is undirected, a NetworkXError is raised. NetworkXUnfeasible If G is not a directed acyclic graph (DAG) no topological sort exists and a NetworkXUnfeasible exception is raised. Notes ----- This is a recursive version of topological sort. See also -------- topological_sort is_directed_acyclic_graph """ if not G.is_directed(): raise nx.NetworkXError( "Topological sort not defined on undirected graphs.") def _dfs(v): ancestors.add(v) for w in G[v]: if w in ancestors: raise nx.NetworkXUnfeasible("Graph contains a cycle.") if w not in explored: _dfs(w) ancestors.remove(v) explored.add(v) order.append(v) ancestors = set() explored = set() order = [] if nbunch is None: nbunch = G.nodes_iter() for v in nbunch: if v not in explored: _dfs(v) return list(reversed(order)) def is_aperiodic(G): """Return True if G is aperiodic. A directed graph is aperiodic if there is no integer k > 1 that divides the length of every cycle in the graph. Parameters ---------- G : NetworkX DiGraph Graph Returns ------- aperiodic : boolean True if the graph is aperiodic False otherwise Raises ------ NetworkXError If G is not directed Notes ----- This uses the method outlined in [1]_, which runs in O(m) time given m edges in G. Note that a graph is not aperiodic if it is acyclic as every integer trivial divides length 0 cycles. References ---------- .. [1] Jarvis, J. P.; Shier, D. R. (1996), Graph-theoretic analysis of finite Markov chains, in Shier, D. R.; Wallenius, K. T., Applied Mathematical Modeling: A Multidisciplinary Approach, CRC Press. 
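For illustration (a sketch, not a doctest), a directed triangle is periodic because every cycle length is a multiple of 3, but adding one reciprocal edge creates a 2-cycle and makes the graph aperiodic, since gcd(3, 2) = 1::

    G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
    nx.is_aperiodic(G)     # False, all cycle lengths are multiples of 3
    G.add_edge(1, 3)       # adds the 2-cycle 1 -> 3 -> 1
    nx.is_aperiodic(G)     # True, gcd(3, 2) == 1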
""" if not G.is_directed(): raise nx.NetworkXError("is_aperiodic not defined for undirected graphs") s = next(G.nodes_iter()) levels = {s:0} this_level = [s] g = 0 l = 1 while this_level: next_level = [] for u in this_level: for v in G[u]: if v in levels: # Non-Tree Edge g = gcd(g, levels[u]-levels[v] + 1) else: # Tree Edge next_level.append(v) levels[v] = l this_level = next_level l += 1 if len(levels)==len(G): #All nodes in tree return g==1 else: return g==1 and nx.is_aperiodic(G.subgraph(set(G)-set(levels))) networkx-1.8.1/networkx/algorithms/isomorphism/0000775000175000017500000000000012177457361021702 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/isomorphism/__init__.py0000664000175000017500000000025512177456333024013 0ustar aricaric00000000000000from networkx.algorithms.isomorphism.isomorph import * from networkx.algorithms.isomorphism.vf2userfunc import * from networkx.algorithms.isomorphism.matchhelpers import * networkx-1.8.1/networkx/algorithms/isomorphism/vf2userfunc.py0000664000175000017500000001653512177456333024534 0ustar aricaric00000000000000""" Module to simplify the specification of user-defined equality functions for node and edge attributes during isomorphism checks. During the construction of an isomorphism, the algorithm considers two candidate nodes n1 in G1 and n2 in G2. The graphs G1 and G2 are then compared with respect to properties involving n1 and n2, and if the outcome is good, then the candidate nodes are considered isomorphic. NetworkX provides a simple mechanism for users to extend the comparisons to include node and edge attributes. Node attributes are handled by the node_match keyword. When considering n1 and n2, the algorithm passes their node attribute dictionaries to node_match, and if it returns False, then n1 and n2 cannot be considered to be isomorphic. Edge attributes are handled by the edge_match keyword. When considering n1 and n2, the algorithm must verify that outgoing edges from n1 are commensurate with the outgoing edges for n2. If the graph is directed, then a similar check is also performed for incoming edges. Focusing only on outgoing edges, we consider pairs of nodes (n1, v1) from G1 and (n2, v2) from G2. For graphs and digraphs, there is only one edge between (n1, v1) and only one edge between (n2, v2). Those edge attribute dictionaries are passed to edge_match, and if it returns False, then n1 and n2 cannot be considered isomorphic. For multigraphs and multidigraphs, there can be multiple edges between (n1, v1) and also multiple edges between (n2, v2). Now, there must exist an isomorphism from "all the edges between (n1, v1)" to "all the edges between (n2, v2)". So, all of the edge attribute dictionaries are passed to edge_match, and it must determine if there is an isomorphism between the two sets of edges. """ import networkx as nx from . import isomorphvf2 as vf2 __all__ = ['GraphMatcher', 'DiGraphMatcher', 'MultiGraphMatcher', 'MultiDiGraphMatcher', ] def _semantic_feasibility(self, G1_node, G2_node): """Returns True if mapping G1_node to G2_node is semantically feasible. 
""" # Make sure the nodes match if self.node_match is not None: nm = self.node_match(self.G1.node[G1_node], self.G2.node[G2_node]) if not nm: return False # Make sure the edges match if self.edge_match is not None: # Cached lookups G1_adj = self.G1_adj G2_adj = self.G2_adj core_1 = self.core_1 edge_match = self.edge_match for neighbor in G1_adj[G1_node]: # G1_node is not in core_1, so we must handle R_self separately if neighbor == G1_node: if not edge_match(G1_adj[G1_node][G1_node], G2_adj[G2_node][G2_node]): return False elif neighbor in core_1: if not edge_match(G1_adj[G1_node][neighbor], G2_adj[G2_node][core_1[neighbor]]): return False # syntactic check has already verified that neighbors are symmetric return True class GraphMatcher(vf2.GraphMatcher): """VF2 isomorphism checker for undirected graphs. """ def __init__(self, G1, G2, node_match=None, edge_match=None): """Initialize graph matcher. Parameters ---------- G1, G2: graph The graphs to be tested. node_match: callable A function that returns True iff node n1 in G1 and n2 in G2 should be considered equal during the isomorphism test. The function will be called like:: node_match(G1.node[n1], G2.node[n2]) That is, the function will receive the node attribute dictionaries of the nodes under consideration. If None, then no attributes are considered when testing for an isomorphism. edge_match: callable A function that returns True iff the edge attribute dictionary for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be considered equal during the isomorphism test. The function will be called like:: edge_match(G1[u1][v1], G2[u2][v2]) That is, the function will receive the edge attribute dictionaries of the edges under consideration. If None, then no attributes are considered when testing for an isomorphism. """ vf2.GraphMatcher.__init__(self, G1, G2) self.node_match = node_match self.edge_match = edge_match # These will be modified during checks to minimize code repeat. self.G1_adj = self.G1.adj self.G2_adj = self.G2.adj semantic_feasibility = _semantic_feasibility class DiGraphMatcher(vf2.DiGraphMatcher): """VF2 isomorphism checker for directed graphs. """ def __init__(self, G1, G2, node_match=None, edge_match=None): """Initialize graph matcher. Parameters ---------- G1, G2 : graph The graphs to be tested. node_match : callable A function that returns True iff node n1 in G1 and n2 in G2 should be considered equal during the isomorphism test. The function will be called like:: node_match(G1.node[n1], G2.node[n2]) That is, the function will receive the node attribute dictionaries of the nodes under consideration. If None, then no attributes are considered when testing for an isomorphism. edge_match : callable A function that returns True iff the edge attribute dictionary for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be considered equal during the isomorphism test. The function will be called like:: edge_match(G1[u1][v1], G2[u2][v2]) That is, the function will receive the edge attribute dictionaries of the edges under consideration. If None, then no attributes are considered when testing for an isomorphism. """ vf2.DiGraphMatcher.__init__(self, G1, G2) self.node_match = node_match self.edge_match = edge_match # These will be modified during checks to minimize code repeat. 
self.G1_adj = self.G1.adj self.G2_adj = self.G2.adj def semantic_feasibility(self, G1_node, G2_node): """Returns True if mapping G1_node to G2_node is semantically feasible.""" # Test node_match and also test edge_match on successors feasible = _semantic_feasibility(self, G1_node, G2_node) if not feasible: return False # Test edge_match on predecessors self.G1_adj = self.G1.pred self.G2_adj = self.G2.pred feasible = _semantic_feasibility(self, G1_node, G2_node) self.G1_adj = self.G1.adj self.G2_adj = self.G2.adj return feasible ## The "semantics" of edge_match are different for multi(di)graphs, but ## the implementation is the same. So, technically we do not need to ## provide "multi" versions, but we do so to match NetworkX's base classes. class MultiGraphMatcher(GraphMatcher): """VF2 isomorphism checker for undirected multigraphs. """ pass class MultiDiGraphMatcher(DiGraphMatcher): """VF2 isomorphism checker for directed multigraphs. """ pass networkx-1.8.1/networkx/algorithms/isomorphism/tests/0000775000175000017500000000000012177457361023044 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/isomorphism/tests/test_isomorphvf2.py0000664000175000017500000001674112177456333026742 0ustar aricaric00000000000000""" Tests for VF2 isomorphism algorithm. """ import os import struct import random from nose.tools import assert_true, assert_equal import networkx as nx from networkx.algorithms import isomorphism as iso class TestWikipediaExample(object): # Source: http://en.wikipedia.org/wiki/Graph_isomorphism # Nodes 'a', 'b', 'c' and 'd' form a column. # Nodes 'g', 'h', 'i' and 'j' form a column. g1edges = [['a','g'], ['a','h'], ['a','i'], ['b','g'], ['b','h'], ['b','j'], ['c','g'], ['c','i'], ['c','j'], ['d','h'], ['d','i'], ['d','j']] # Nodes 1,2,3,4 form the clockwise corners of a large square. # Nodes 5,6,7,8 form the clockwise corners of a small square g2edges = [[1,2], [2,3], [3,4], [4,1], [5,6], [6,7], [7,8], [8,5], [1,5], [2,6], [3,7], [4,8]] def test_graph(self): g1 = nx.Graph() g2 = nx.Graph() g1.add_edges_from(self.g1edges) g2.add_edges_from(self.g2edges) gm = iso.GraphMatcher(g1,g2) assert_true(gm.is_isomorphic()) mapping = sorted(gm.mapping.items()) # this mapping is only one of the possibilies # so this test needs to be reconsidered # isomap = [('a', 1), ('b', 6), ('c', 3), ('d', 8), # ('g', 2), ('h', 5), ('i', 4), ('j', 7)] # assert_equal(mapping, isomap) def test_subgraph(self): g1 = nx.Graph() g2 = nx.Graph() g1.add_edges_from(self.g1edges) g2.add_edges_from(self.g2edges) g3 = g2.subgraph([1,2,3,4]) gm = iso.GraphMatcher(g1,g3) assert_true(gm.subgraph_is_isomorphic()) class TestVF2GraphDB(object): # http://amalfi.dis.unina.it/graph/db/ @staticmethod def create_graph(filename): """Creates a Graph instance from the filename.""" # The file is assumed to be in the format from the VF2 graph database. # Each file is composed of 16-bit numbers (unsigned short int). # So we will want to read 2 bytes at a time. 
# We can read the number as follows: # number = struct.unpack(' [remainder of test_isomorphvf2.py unreadable; binary data omitted] networkx-1.8.1/networkx/algorithms/isomorphism/tests/si2_b06_m200.B990000664000175000017500000000310212177456333025225 0ustar aricaric00000000000000[binary graph-database fixture; contents omitted]networkx-1.8.1/networkx/algorithms/isomorphism/tests/si2_b06_m200.A990000664000175000017500000000046612177456333025236 0ustar aricaric00000000000000[binary graph-database fixture; contents omitted]networkx-1.8.1/networkx/algorithms/isomorphism/tests/iso_r01_s80.A990000664000175000017500000000264212177456333025300 0ustar aricaric00000000000000[binary graph-database fixture; contents omitted]networkx-1.8.1/networkx/algorithms/isomorphism/tests/test_isomorphism.py0000664000175000017500000000223712177456333027030 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx.algorithms import isomorphism as iso class TestIsomorph: def setUp(self): self.G1=nx.Graph() self.G2=nx.Graph() self.G3=nx.Graph() self.G4=nx.Graph() self.G1.add_edges_from([ [1,2],[1,3],[1,5],[2,3] ]) self.G2.add_edges_from([ [10,20],[20,30],[10,30],[10,50] ]) self.G3.add_edges_from([ [1,2],[1,3],[1,5],[2,5] ]) self.G4.add_edges_from([ [1,2],[1,3],[1,5],[2,4] ]) def test_could_be_isomorphic(self): assert_true(iso.could_be_isomorphic(self.G1,self.G2)) assert_true(iso.could_be_isomorphic(self.G1,self.G3)) assert_false(iso.could_be_isomorphic(self.G1,self.G4)) assert_true(iso.could_be_isomorphic(self.G3,self.G2)) def test_fast_could_be_isomorphic(self): assert_true(iso.fast_could_be_isomorphic(self.G3,self.G2)) def test_faster_could_be_isomorphic(self): assert_true(iso.faster_could_be_isomorphic(self.G3,self.G2)) def test_is_isomorphic(self): assert_true(iso.is_isomorphic(self.G1,self.G2)) assert_false(iso.is_isomorphic(self.G1,self.G4)) networkx-1.8.1/networkx/algorithms/isomorphism/tests/test_vf2userfunc.py0000664000175000017500000001501112177456333026721 0ustar aricaric00000000000000""" Tests for VF2 isomorphism algorithm for weighted graphs. 
""" from nose.tools import assert_true, assert_false from operator import eq import networkx as nx import networkx.algorithms.isomorphism as iso def test_simple(): # 16 simple tests w = 'weight' edges = [(0,0,1),(0,0,1.5),(0,1,2),(1,0,3)] for g1 in [nx.Graph(), nx.DiGraph(), nx.MultiGraph(), nx.MultiDiGraph(), ]: g1.add_weighted_edges_from(edges) g2 = g1.subgraph(g1.nodes()) if g1.is_multigraph(): em = iso.numerical_multiedge_match('weight', 1) else: em = iso.numerical_edge_match('weight', 1) assert_true( nx.is_isomorphic(g1,g2,edge_match=em) ) for mod1, mod2 in [(False, True), (True, False), (True, True)]: # mod1 tests a regular edge # mod2 tests a selfloop if g2.is_multigraph(): if mod1: data1 = {0:{'weight':10}} if mod2: data2 = {0:{'weight':1},1:{'weight':2.5}} else: if mod1: data1 = {'weight':10} if mod2: data2 = {'weight':2.5} g2 = g1.subgraph(g1.nodes()) if mod1: if not g1.is_directed(): g2.adj[1][0] = data1 g2.adj[0][1] = data1 else: g2.succ[1][0] = data1 g2.pred[0][1] = data1 if mod2: if not g1.is_directed(): g2.adj[0][0] = data2 else: g2.succ[0][0] = data2 g2.pred[0][0] = data2 assert_false(nx.is_isomorphic(g1,g2,edge_match=em)) def test_weightkey(): g1 = nx.DiGraph() g2 = nx.DiGraph() g1.add_edge('A','B', weight=1) g2.add_edge('C','D', weight=0) assert_true( nx.is_isomorphic(g1, g2) ) em = iso.numerical_edge_match('nonexistent attribute', 1) assert_true( nx.is_isomorphic(g1, g2, edge_match=em) ) em = iso.numerical_edge_match('weight', 1) assert_false( nx.is_isomorphic(g1, g2, edge_match=em) ) g2 = nx.DiGraph() g2.add_edge('C','D') assert_true( nx.is_isomorphic(g1, g2, edge_match=em) ) class TestNodeMatch_Graph(object): def setUp(self): self.g1 = nx.Graph() self.g2 = nx.Graph() self.build() def build(self): self.nm = iso.categorical_node_match('color', '') self.em = iso.numerical_edge_match('weight', 1) self.g1.add_node('A', color='red') self.g2.add_node('C', color='blue') self.g1.add_edge('A','B', weight=1) self.g2.add_edge('C','D', weight=1) def test_noweight_nocolor(self): assert_true( nx.is_isomorphic(self.g1, self.g2) ) def test_color1(self): assert_false( nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) ) def test_color2(self): self.g1.node['A']['color'] = 'blue' assert_true( nx.is_isomorphic(self.g1, self.g2, node_match=self.nm) ) def test_weight1(self): assert_true( nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) ) def test_weight2(self): self.g1.add_edge('A', 'B', weight=2) assert_false( nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) ) def test_colorsandweights1(self): iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) assert_false(iso) def test_colorsandweights2(self): self.g1.node['A']['color'] = 'blue' iso = nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) assert_true(iso) def test_colorsandweights3(self): # make the weights disagree self.g1.add_edge('A', 'B', weight=2) assert_false( nx.is_isomorphic(self.g1, self.g2, node_match=self.nm, edge_match=self.em) ) class TestEdgeMatch_MultiGraph(object): def setUp(self): self.g1 = nx.MultiGraph() self.g2 = nx.MultiGraph() self.GM = iso.MultiGraphMatcher self.build() def build(self): g1 = self.g1 g2 = self.g2 # We will assume integer weights only. 
g1.add_edge('A', 'B', color='green', weight=0, size=.5) g1.add_edge('A', 'B', color='red', weight=1, size=.35) g1.add_edge('A', 'B', color='red', weight=2, size=.65) g2.add_edge('C', 'D', color='green', weight=1, size=.5) g2.add_edge('C', 'D', color='red', weight=0, size=.45) g2.add_edge('C', 'D', color='red', weight=2, size=.65) if g1.is_multigraph(): self.em = iso.numerical_multiedge_match('weight', 1) self.emc = iso.categorical_multiedge_match('color', '') self.emcm = iso.categorical_multiedge_match(['color', 'weight'], ['', 1]) self.emg1 = iso.generic_multiedge_match('color', 'red', eq) self.emg2 = iso.generic_multiedge_match(['color', 'weight', 'size'], ['red', 1, .5], [eq, eq, iso.matchhelpers.close]) else: self.em = iso.numerical_edge_match('weight', 1) self.emc = iso.categorical_edge_match('color', '') self.emcm = iso.categorical_edge_match(['color', 'weight'], ['', 1]) self.emg1 = iso.generic_multiedge_match('color', 'red', eq) self.emg2 = iso.generic_edge_match(['color', 'weight', 'size'], ['red', 1, .5], [eq, eq, iso.matchhelpers.close]) def test_weights_only(self): assert_true( nx.is_isomorphic(self.g1, self.g2, edge_match=self.em) ) def test_colors_only(self): gm = self.GM(self.g1, self.g2, edge_match=self.emc) assert_true( gm.is_isomorphic() ) def test_colorsandweights(self): gm = self.GM(self.g1, self.g2, edge_match=self.emcm) assert_false( gm.is_isomorphic() ) def test_generic1(self): gm = self.GM(self.g1, self.g2, edge_match=self.emg1) assert_true( gm.is_isomorphic() ) def test_generic2(self): gm = self.GM(self.g1, self.g2, edge_match=self.emg2) assert_false( gm.is_isomorphic() ) class TestEdgeMatch_DiGraph(TestNodeMatch_Graph): def setUp(self): self.g1 = nx.DiGraph() self.g2 = nx.DiGraph() self.build() class TestEdgeMatch_MultiDiGraph(TestEdgeMatch_MultiGraph): def setUp(self): self.g1 = nx.MultiDiGraph() self.g2 = nx.MultiDiGraph() self.GM = iso.MultiDiGraphMatcher self.build() networkx-1.8.1/networkx/algorithms/isomorphism/isomorph.py0000664000175000017500000001500212177456333024110 0ustar aricaric00000000000000""" Graph isomorphism functions. """ import networkx as nx from networkx.exception import NetworkXError __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Christopher Ellison cellison@cse.ucdavis.edu)']) # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['could_be_isomorphic', 'fast_could_be_isomorphic', 'faster_could_be_isomorphic', 'is_isomorphic'] def could_be_isomorphic(G1,G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. Parameters ---------- G1, G2 : graphs The two graphs G1 and G2 must be the same type. Notes ----- Checks for matching degree, triangle, and number of cliques sequences. """ # Check global properties if G1.order() != G2.order(): return False # Check local properties d1=G1.degree() t1=nx.triangles(G1) c1=nx.number_of_cliques(G1) props1=[ [d1[v], t1[v], c1[v]] for v in d1 ] props1.sort() d2=G2.degree() t2=nx.triangles(G2) c2=nx.number_of_cliques(G2) props2=[ [d2[v], t2[v], c2[v]] for v in d2 ] props2.sort() if props1 != props2: return False # OK... return True graph_could_be_isomorphic=could_be_isomorphic def fast_could_be_isomorphic(G1,G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. Parameters ---------- G1, G2 : graphs The two graphs G1 and G2 must be the same type. Notes ----- Checks for matching degree and triangle sequences. 
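For example, a 6-cycle and two disjoint triangles share a degree sequence but have different triangle counts, so this check distinguishes them while the degree-only check in ``faster_could_be_isomorphic`` does not (an illustrative sketch, not a doctest)::

    G1 = nx.cycle_graph(6)                                              # no triangles
    G2 = nx.disjoint_union(nx.complete_graph(3), nx.complete_graph(3))  # one triangle per node
    faster_could_be_isomorphic(G1, G2)   # True,  degree sequences match
    fast_could_be_isomorphic(G1, G2)     # False, triangle sequences differ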
""" # Check global properties if G1.order() != G2.order(): return False # Check local properties d1=G1.degree() t1=nx.triangles(G1) props1=[ [d1[v], t1[v]] for v in d1 ] props1.sort() d2=G2.degree() t2=nx.triangles(G2) props2=[ [d2[v], t2[v]] for v in d2 ] props2.sort() if props1 != props2: return False # OK... return True fast_graph_could_be_isomorphic=fast_could_be_isomorphic def faster_could_be_isomorphic(G1,G2): """Returns False if graphs are definitely not isomorphic. True does NOT guarantee isomorphism. Parameters ---------- G1, G2 : graphs The two graphs G1 and G2 must be the same type. Notes ----- Checks for matching degree sequences. """ # Check global properties if G1.order() != G2.order(): return False # Check local properties d1=list(G1.degree().values()) d1.sort() d2=list(G2.degree().values()) d2.sort() if d1 != d2: return False # OK... return True faster_graph_could_be_isomorphic=faster_could_be_isomorphic def is_isomorphic(G1, G2, node_match=None, edge_match=None): """Returns True if the graphs G1 and G2 are isomorphic and False otherwise. Parameters ---------- G1, G2: graphs The two graphs G1 and G2 must be the same type. node_match : callable A function that returns True if node n1 in G1 and n2 in G2 should be considered equal during the isomorphism test. If node_match is not specified then node attributes are not considered. The function will be called like node_match(G1.node[n1], G2.node[n2]). That is, the function will receive the node attribute dictionaries for n1 and n2 as inputs. edge_match : callable A function that returns True if the edge attribute dictionary for the pair of nodes (u1, v1) in G1 and (u2, v2) in G2 should be considered equal during the isomorphism test. If edge_match is not specified then edge attributes are not considered. The function will be called like edge_match(G1[u1][v1], G2[u2][v2]). That is, the function will receive the edge attribute dictionaries of the edges under consideration. Notes ----- Uses the vf2 algorithm [1]_. Examples -------- >>> import networkx.algorithms.isomorphism as iso For digraphs G1 and G2, using 'weight' edge attribute (default: 1) >>> G1 = nx.DiGraph() >>> G2 = nx.DiGraph() >>> G1.add_path([1,2,3,4],weight=1) >>> G2.add_path([10,20,30,40],weight=2) >>> em = iso.numerical_edge_match('weight', 1) >>> nx.is_isomorphic(G1, G2) # no weights considered True >>> nx.is_isomorphic(G1, G2, edge_match=em) # match weights False For multidigraphs G1 and G2, using 'fill' node attribute (default: '') >>> G1 = nx.MultiDiGraph() >>> G2 = nx.MultiDiGraph() >>> G1.add_nodes_from([1,2,3],fill='red') >>> G2.add_nodes_from([10,20,30,40],fill='red') >>> G1.add_path([1,2,3,4],weight=3, linewidth=2.5) >>> G2.add_path([10,20,30,40],weight=3) >>> nm = iso.categorical_node_match('fill', 'red') >>> nx.is_isomorphic(G1, G2, node_match=nm) True For multidigraphs G1 and G2, using 'weight' edge attribute (default: 7) >>> G1.add_edge(1,2, weight=7) >>> G2.add_edge(10,20) >>> em = iso.numerical_multiedge_match('weight', 7, rtol=1e-6) >>> nx.is_isomorphic(G1, G2, edge_match=em) True For multigraphs G1 and G2, using 'weight' and 'linewidth' edge attributes with default values 7 and 2.5. Also using 'fill' node attribute with default value 'red'. 
>>> em = iso.numerical_multiedge_match(['weight', 'linewidth'], [7, 2.5]) >>> nm = iso.categorical_node_match('fill', 'red') >>> nx.is_isomorphic(G1, G2, edge_match=em, node_match=nm) True See Also -------- numerical_node_match, numerical_edge_match, numerical_multiedge_match categorical_node_match, categorical_edge_match, categorical_multiedge_match References ---------- .. [1] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop on Graph-based Representations in Pattern Recognition, Cuen, pp. 149-159, 2001. http://amalfi.dis.unina.it/graph/db/papers/vf-algorithm.pdf """ if G1.is_directed() and G2.is_directed(): GM = nx.algorithms.isomorphism.DiGraphMatcher elif (not G1.is_directed()) and (not G2.is_directed()): GM = nx.algorithms.isomorphism.GraphMatcher else: raise NetworkXError("Graphs G1 and G2 are not of the same type.") gm = GM(G1, G2, node_match=node_match, edge_match=edge_match) return gm.is_isomorphic() networkx-1.8.1/networkx/algorithms/isomorphism/isomorphvf2.py0000664000175000017500000010767212177456333024545 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ************* VF2 Algorithm ************* An implementation of VF2 algorithm for graph ismorphism testing. The simplest interface to use this module is to call networkx.is_isomorphic(). Introduction ------------ The GraphMatcher and DiGraphMatcher are responsible for matching graphs or directed graphs in a predetermined manner. This usually means a check for an isomorphism, though other checks are also possible. For example, a subgraph of one graph can be checked for isomorphism to a second graph. Matching is done via syntactic feasibility. It is also possible to check for semantic feasibility. Feasibility, then, is defined as the logical AND of the two functions. To include a semantic check, the (Di)GraphMatcher class should be subclassed, and the semantic_feasibility() function should be redefined. By default, the semantic feasibility function always returns True. The effect of this is that semantics are not considered in the matching of G1 and G2. Examples -------- Suppose G1 and G2 are isomorphic graphs. Verification is as follows: >>> from networkx.algorithms import isomorphism >>> G1 = nx.path_graph(4) >>> G2 = nx.path_graph(4) >>> GM = isomorphism.GraphMatcher(G1,G2) >>> GM.is_isomorphic() True GM.mapping stores the isomorphism mapping from G1 to G2. >>> GM.mapping {0: 0, 1: 1, 2: 2, 3: 3} Suppose G1 and G2 are isomorphic directed graphs graphs. Verification is as follows: >>> G1 = nx.path_graph(4, create_using=nx.DiGraph()) >>> G2 = nx.path_graph(4, create_using=nx.DiGraph()) >>> DiGM = isomorphism.DiGraphMatcher(G1,G2) >>> DiGM.is_isomorphic() True DiGM.mapping stores the isomorphism mapping from G1 to G2. >>> DiGM.mapping {0: 0, 1: 1, 2: 2, 3: 3} Subgraph Isomorphism -------------------- Graph theory literature can be ambiguious about the meaning of the above statement, and we seek to clarify it now. In the VF2 literature, a mapping M is said to be a graph-subgraph isomorphism iff M is an isomorphism between G2 and a subgraph of G1. Thus, to say that G1 and G2 are graph-subgraph isomorphic is to say that a subgraph of G1 is isomorphic to G2. Other literature uses the phrase 'subgraph isomorphic' as in 'G1 does not have a subgraph isomorphic to G2'. Another use is as an in adverb for isomorphic. Thus, to say that G1 and G2 are subgraph isomorphic is to say that a subgraph of G1 is isomorphic to G2. 
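For instance, to test whether some node-induced subgraph of G1 is isomorphic
to G2, the matcher can be used directly (a small sketch)::

    G1 = nx.path_graph(5)
    G2 = nx.path_graph(3)
    GM = isomorphism.GraphMatcher(G1, G2)
    GM.subgraph_is_isomorphic()  # True; nodes {0, 1, 2} of G1 induce a 3-node path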
Finally, the term 'subgraph' can have multiple meanings. In this context, 'subgraph' always means a 'node-induced subgraph'. Edge-induced subgraph isomorphisms are not directly supported, but one should be able to perform the check by making use of nx.line_graph(). For subgraphs which are not induced, the term 'monomorphism' is preferred over 'isomorphism'. Currently, it is not possible to check for monomorphisms. Let G=(N,E) be a graph with a set of nodes N and set of edges E. If G'=(N',E') is a subgraph, then: N' is a subset of N E' is a subset of E If G'=(N',E') is a node-induced subgraph, then: N' is a subset of N E' is the subset of edges in E relating nodes in N' If G'=(N',E') is an edge-induced subgrpah, then: N' is the subset of nodes in N related by edges in E' E' is a subset of E References ---------- [1] Luigi P. Cordella, Pasquale Foggia, Carlo Sansone, Mario Vento, "A (Sub)Graph Isomorphism Algorithm for Matching Large Graphs", IEEE Transactions on Pattern Analysis and Machine Intelligence, vol. 26, no. 10, pp. 1367-1372, Oct., 2004. http://ieeexplore.ieee.org/iel5/34/29305/01323804.pdf [2] L. P. Cordella, P. Foggia, C. Sansone, M. Vento, "An Improved Algorithm for Matching Large Graphs", 3rd IAPR-TC15 Workshop on Graph-based Representations in Pattern Recognition, Cuen, pp. 149-159, 2001. http://amalfi.dis.unina.it/graph/db/papers/vf-algorithm.pdf See Also -------- syntactic_feasibliity(), semantic_feasibility() Notes ----- Modified to handle undirected graphs. Modified to handle multiple edges. In general, this problem is NP-Complete. """ # Copyright (C) 2007-2009 by the NetworkX maintainers # All rights reserved. # BSD license. # This work was originally coded by Christopher Ellison # as part of the Computational Mechanics Python (CMPy) project. # James P. Crutchfield, principal investigator. # Complexity Sciences Center and Physics Department, UC Davis. import sys import networkx as nx __all__ = ['GraphMatcher', 'DiGraphMatcher'] class GraphMatcher(object): """Implementation of VF2 algorithm for matching undirected graphs. Suitable for Graph and MultiGraph instances. """ def __init__(self, G1, G2): """Initialize GraphMatcher. Parameters ---------- G1,G2: NetworkX Graph or MultiGraph instances. The two graphs to check for isomorphism. Examples -------- To create a GraphMatcher which checks for syntactic feasibility: >>> from networkx.algorithms import isomorphism >>> G1 = nx.path_graph(4) >>> G2 = nx.path_graph(4) >>> GM = isomorphism.GraphMatcher(G1,G2) """ self.G1 = G1 self.G2 = G2 self.G1_nodes = set(G1.nodes()) self.G2_nodes = set(G2.nodes()) # Set recursion limit. self.old_recursion_limit = sys.getrecursionlimit() expected_max_recursion_level = len(self.G2) if self.old_recursion_limit < 1.5 * expected_max_recursion_level: # Give some breathing room. sys.setrecursionlimit(int(1.5 * expected_max_recursion_level)) # Declare that we will be searching for a graph-graph isomorphism. self.test = 'graph' # Initialize state self.initialize() def reset_recursion_limit(self): """Restores the recursion limit.""" ### TODO: ### Currently, we use recursion and set the recursion level higher. ### It would be nice to restore the level, but because the ### (Di)GraphMatcher classes make use of cyclic references, garbage ### collection will never happen when we define __del__() to ### restore the recursion level. The result is a memory leak. ### So for now, we do not automatically restore the recursion level, ### and instead provide a method to do this manually. 
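        ### For reference, the manual pattern looks roughly like this (an
        ### illustrative sketch):
        ###
        ###     gm = GraphMatcher(G1, G2)
        ###     try:
        ###         gm.is_isomorphic()
        ###     finally:
        ###         gm.reset_recursion_limit()
        ###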
Eventually, ### we should turn this into a non-recursive implementation. sys.setrecursionlimit(self.old_recursion_limit) def candidate_pairs_iter(self): """Iterator over candidate pairs of nodes in G1 and G2.""" # All computations are done using the current state! G1_nodes = self.G1_nodes G2_nodes = self.G2_nodes # First we compute the inout-terminal sets. T1_inout = [node for node in G1_nodes if (node in self.inout_1) and (node not in self.core_1)] T2_inout = [node for node in G2_nodes if (node in self.inout_2) and (node not in self.core_2)] # If T1_inout and T2_inout are both nonempty. # P(s) = T1_inout x {min T2_inout} if T1_inout and T2_inout: for node in T1_inout: yield node, min(T2_inout) else: # If T1_inout and T2_inout were both empty.... # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} ##if not (T1_inout or T2_inout): # as suggested by [2], incorrect if 1: # as inferred from [1], correct # First we determine the candidate node for G2 other_node = min(G2_nodes - set(self.core_2)) for node in self.G1: if node not in self.core_1: yield node, other_node # For all other cases, we don't have any candidate pairs. def initialize(self): """Reinitializes the state of the algorithm. This method should be redefined if using something other than GMState. If only subclassing GraphMatcher, a redefinition is not necessary. """ # core_1[n] contains the index of the node paired with n, which is m, # provided n is in the mapping. # core_2[m] contains the index of the node paired with m, which is n, # provided m is in the mapping. self.core_1 = {} self.core_2 = {} # See the paper for definitions of M_x and T_x^{y} # inout_1[n] is non-zero if n is in M_1 or in T_1^{inout} # inout_2[m] is non-zero if m is in M_2 or in T_2^{inout} # # The value stored is the depth of the SSR tree when the node became # part of the corresponding set. self.inout_1 = {} self.inout_2 = {} # Practically, these sets simply store the nodes in the subgraph. self.state = GMState(self) # Provide a convienient way to access the isomorphism mapping. self.mapping = self.core_1.copy() def is_isomorphic(self): """Returns True if G1 and G2 are isomorphic graphs.""" # Let's do two very quick checks! # QUESTION: Should we call faster_graph_could_be_isomorphic(G1,G2)? # For now, I just copy the code. # Check global properties if self.G1.order() != self.G2.order(): return False # Check local properties d1=sorted(self.G1.degree().values()) d2=sorted(self.G2.degree().values()) if d1 != d2: return False try: x = next(self.isomorphisms_iter()) return True except StopIteration: return False def isomorphisms_iter(self): """Generator over isomorphisms between G1 and G2.""" # Declare that we are looking for a graph-graph isomorphism. self.test = 'graph' self.initialize() for mapping in self.match(): yield mapping def match(self): """Extends the isomorphism mapping. This function is called recursively to determine if a complete isomorphism can be found between G1 and G2. It cleans up the class variables after each recursive call. If an isomorphism is found, we yield the mapping. """ if len(self.core_1) == len(self.G2): # Save the final mapping, otherwise garbage collection deletes it. self.mapping = self.core_1.copy() # The mapping is complete. yield self.mapping else: for G1_node, G2_node in self.candidate_pairs_iter(): if self.syntactic_feasibility(G1_node, G2_node): if self.semantic_feasibility(G1_node, G2_node): # Recursive call, adding the feasible state. 
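                        # Instantiating self.state.__class__ (GMState or
                        # DiGMState) pushes the pair (G1_node, G2_node) onto
                        # core_1/core_2 and stamps the terminal sets with the
                        # new search depth; newstate.restore() below removes
                        # exactly that data so the search can backtrack.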
newstate = self.state.__class__(self, G1_node, G2_node) for mapping in self.match(): yield mapping # restore data structures newstate.restore() def semantic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is symantically feasible. The semantic feasibility function should return True if it is acceptable to add the candidate pair (G1_node, G2_node) to the current partial isomorphism mapping. The logic should focus on semantic information contained in the edge data or a formalized node class. By acceptable, we mean that the subsequent mapping can still become a complete isomorphism mapping. Thus, if adding the candidate pair definitely makes it so that the subsequent mapping cannot become a complete isomorphism mapping, then this function must return False. The default semantic feasibility function always returns True. The effect is that semantics are not considered in the matching of G1 and G2. The semantic checks might differ based on the what type of test is being performed. A keyword description of the test is stored in self.test. Here is a quick description of the currently implemented tests:: test='graph' Indicates that the graph matcher is looking for a graph-graph isomorphism. test='subgraph' Indicates that the graph matcher is looking for a subgraph-graph isomorphism such that a subgraph of G1 is isomorphic to G2. Any subclass which redefines semantic_feasibility() must maintain the above form to keep the match() method functional. Implementations should consider multigraphs. """ return True def subgraph_is_isomorphic(self): """Returns True if a subgraph of G1 is isomorphic to G2.""" try: x = next(self.subgraph_isomorphisms_iter()) return True except StopIteration: return False # subgraph_is_isomorphic.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) def subgraph_isomorphisms_iter(self): """Generator over isomorphisms between a subgraph of G1 and G2.""" # Declare that we are looking for graph-subgraph isomorphism. self.test = 'subgraph' self.initialize() for mapping in self.match(): yield mapping # subgraph_isomorphisms_iter.__doc__ += "\n" + subgraph.replace('\n','\n'+indent) def syntactic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is syntactically feasible. This function returns True if it is adding the candidate pair to the current partial isomorphism mapping is allowable. The addition is allowable if the inclusion of the candidate pair does not make it impossible for an isomorphism to be found. """ # The VF2 algorithm was designed to work with graphs having, at most, # one edge connecting any two nodes. This is not the case when # dealing with an MultiGraphs. # # Basically, when we test the look-ahead rules R_neighbor, we will # make sure that the number of edges are checked. We also add # a R_self check to verify that the number of selfloops is acceptable. # # Users might be comparing Graph instances with MultiGraph instances. # So the generic GraphMatcher class must work with MultiGraphs. # Care must be taken since the value in the innermost dictionary is a # singlet for Graph instances. For MultiGraphs, the value in the # innermost dictionary is a list. ### ### Test at each step to get a return value as soon as possible. ### ### Look ahead 0 # R_self # The number of selfloops for G1_node must equal the number of # self-loops for G2_node. Without this check, we would fail on # R_neighbor at the next recursion level. But it is good to prune the # search tree now. 
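        # On multigraphs, number_of_edges(u, u) counts parallel self-loops,
        # while on simple graphs it is 0 or 1, so this single comparison
        # covers Graph and MultiGraph instances alike.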
if self.G1.number_of_edges(G1_node,G1_node) != self.G2.number_of_edges(G2_node,G2_node): return False # R_neighbor # For each neighbor n' of n in the partial mapping, the corresponding # node m' is a neighbor of m, and vice versa. Also, the number of # edges must be equal. for neighbor in self.G1[G1_node]: if neighbor in self.core_1: if not (self.core_1[neighbor] in self.G2[G2_node]): return False elif self.G1.number_of_edges(neighbor, G1_node) != self.G2.number_of_edges(self.core_1[neighbor], G2_node): return False for neighbor in self.G2[G2_node]: if neighbor in self.core_2: if not (self.core_2[neighbor] in self.G1[G1_node]): return False elif self.G1.number_of_edges(self.core_2[neighbor], G1_node) != self.G2.number_of_edges(neighbor, G2_node): return False ### Look ahead 1 # R_terminout # The number of neighbors of n that are in T_1^{inout} is equal to the # number of neighbors of m that are in T_2^{inout}, and vice versa. num1 = 0 for neighbor in self.G1[G1_node]: if (neighbor in self.inout_1) and (neighbor not in self.core_1): num1 += 1 num2 = 0 for neighbor in self.G2[G2_node]: if (neighbor in self.inout_2) and (neighbor not in self.core_2): num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False ### Look ahead 2 # R_new # The number of neighbors of n that are neither in the core_1 nor # T_1^{inout} is equal to the number of neighbors of m # that are neither in core_2 nor T_2^{inout}. num1 = 0 for neighbor in self.G1[G1_node]: if neighbor not in self.inout_1: num1 += 1 num2 = 0 for neighbor in self.G2[G2_node]: if neighbor not in self.inout_2: num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False # Otherwise, this node pair is syntactically feasible! return True class DiGraphMatcher(GraphMatcher): """Implementation of VF2 algorithm for matching directed graphs. Suitable for DiGraph and MultiDiGraph instances. """ # __doc__ += "Notes\n%s-----" % (indent,) + sources.replace('\n','\n'+indent) def __init__(self, G1, G2): """Initialize DiGraphMatcher. G1 and G2 should be nx.Graph or nx.MultiGraph instances. Examples -------- To create a GraphMatcher which checks for syntactic feasibility: >>> from networkx.algorithms import isomorphism >>> G1 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) >>> G2 = nx.DiGraph(nx.path_graph(4, create_using=nx.DiGraph())) >>> DiGM = isomorphism.DiGraphMatcher(G1,G2) """ super(DiGraphMatcher, self).__init__(G1, G2) def candidate_pairs_iter(self): """Iterator over candidate pairs of nodes in G1 and G2.""" # All computations are done using the current state! G1_nodes = self.G1_nodes G2_nodes = self.G2_nodes # First we compute the out-terminal sets. T1_out = [node for node in G1_nodes if (node in self.out_1) and (node not in self.core_1)] T2_out = [node for node in G2_nodes if (node in self.out_2) and (node not in self.core_2)] # If T1_out and T2_out are both nonempty. # P(s) = T1_out x {min T2_out} if T1_out and T2_out: node_2 = min(T2_out) for node_1 in T1_out: yield node_1, node_2 # If T1_out and T2_out were both empty.... # We compute the in-terminal sets. ##elif not (T1_out or T2_out): # as suggested by [2], incorrect else: # as suggested by [1], correct T1_in = [node for node in G1_nodes if (node in self.in_1) and (node not in self.core_1)] T2_in = [node for node in G2_nodes if (node in self.in_2) and (node not in self.core_2)] # If T1_in and T2_in are both nonempty. 
# P(s) = T1_out x {min T2_out} if T1_in and T2_in: node_2 = min(T2_in) for node_1 in T1_in: yield node_1, node_2 # If all terminal sets are empty... # P(s) = (N_1 - M_1) x {min (N_2 - M_2)} ##elif not (T1_in or T2_in): # as suggested by [2], incorrect else: # as inferred from [1], correct node_2 = min(G2_nodes - set(self.core_2)) for node_1 in G1_nodes: if node_1 not in self.core_1: yield node_1, node_2 # For all other cases, we don't have any candidate pairs. def initialize(self): """Reinitializes the state of the algorithm. This method should be redefined if using something other than DiGMState. If only subclassing GraphMatcher, a redefinition is not necessary. """ # core_1[n] contains the index of the node paired with n, which is m, # provided n is in the mapping. # core_2[m] contains the index of the node paired with m, which is n, # provided m is in the mapping. self.core_1 = {} self.core_2 = {} # See the paper for definitions of M_x and T_x^{y} # in_1[n] is non-zero if n is in M_1 or in T_1^{in} # out_1[n] is non-zero if n is in M_1 or in T_1^{out} # # in_2[m] is non-zero if m is in M_2 or in T_2^{in} # out_2[m] is non-zero if m is in M_2 or in T_2^{out} # # The value stored is the depth of the search tree when the node became # part of the corresponding set. self.in_1 = {} self.in_2 = {} self.out_1 = {} self.out_2 = {} self.state = DiGMState(self) # Provide a convienient way to access the isomorphism mapping. self.mapping = self.core_1.copy() def syntactic_feasibility(self, G1_node, G2_node): """Returns True if adding (G1_node, G2_node) is syntactically feasible. This function returns True if it is adding the candidate pair to the current partial isomorphism mapping is allowable. The addition is allowable if the inclusion of the candidate pair does not make it impossible for an isomorphism to be found. """ # The VF2 algorithm was designed to work with graphs having, at most, # one edge connecting any two nodes. This is not the case when # dealing with an MultiGraphs. # # Basically, when we test the look-ahead rules R_pred and R_succ, we # will make sure that the number of edges are checked. We also add # a R_self check to verify that the number of selfloops is acceptable. # Users might be comparing DiGraph instances with MultiDiGraph # instances. So the generic DiGraphMatcher class must work with # MultiDiGraphs. Care must be taken since the value in the innermost # dictionary is a singlet for DiGraph instances. For MultiDiGraphs, # the value in the innermost dictionary is a list. ### ### Test at each step to get a return value as soon as possible. ### ### Look ahead 0 # R_self # The number of selfloops for G1_node must equal the number of # self-loops for G2_node. Without this check, we would fail on R_pred # at the next recursion level. This should prune the tree even further. if self.G1.number_of_edges(G1_node,G1_node) != self.G2.number_of_edges(G2_node,G2_node): return False # R_pred # For each predecessor n' of n in the partial mapping, the # corresponding node m' is a predecessor of m, and vice versa. 
Also, # the number of edges must be equal for predecessor in self.G1.pred[G1_node]: if predecessor in self.core_1: if not (self.core_1[predecessor] in self.G2.pred[G2_node]): return False elif self.G1.number_of_edges(predecessor, G1_node) != self.G2.number_of_edges(self.core_1[predecessor], G2_node): return False for predecessor in self.G2.pred[G2_node]: if predecessor in self.core_2: if not (self.core_2[predecessor] in self.G1.pred[G1_node]): return False elif self.G1.number_of_edges(self.core_2[predecessor], G1_node) != self.G2.number_of_edges(predecessor, G2_node): return False # R_succ # For each successor n' of n in the partial mapping, the corresponding # node m' is a successor of m, and vice versa. Also, the number of # edges must be equal. for successor in self.G1[G1_node]: if successor in self.core_1: if not (self.core_1[successor] in self.G2[G2_node]): return False elif self.G1.number_of_edges(G1_node, successor) != self.G2.number_of_edges(G2_node, self.core_1[successor]): return False for successor in self.G2[G2_node]: if successor in self.core_2: if not (self.core_2[successor] in self.G1[G1_node]): return False elif self.G1.number_of_edges(G1_node, self.core_2[successor]) != self.G2.number_of_edges(G2_node, successor): return False ### Look ahead 1 # R_termin # The number of predecessors of n that are in T_1^{in} is equal to the # number of predecessors of m that are in T_2^{in}. num1 = 0 for predecessor in self.G1.pred[G1_node]: if (predecessor in self.in_1) and (predecessor not in self.core_1): num1 += 1 num2 = 0 for predecessor in self.G2.pred[G2_node]: if (predecessor in self.in_2) and (predecessor not in self.core_2): num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False # The number of successors of n that are in T_1^{in} is equal to the # number of successors of m that are in T_2^{in}. num1 = 0 for successor in self.G1[G1_node]: if (successor in self.in_1) and (successor not in self.core_1): num1 += 1 num2 = 0 for successor in self.G2[G2_node]: if (successor in self.in_2) and (successor not in self.core_2): num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False # R_termout # The number of predecessors of n that are in T_1^{out} is equal to the # number of predecessors of m that are in T_2^{out}. num1 = 0 for predecessor in self.G1.pred[G1_node]: if (predecessor in self.out_1) and (predecessor not in self.core_1): num1 += 1 num2 = 0 for predecessor in self.G2.pred[G2_node]: if (predecessor in self.out_2) and (predecessor not in self.core_2): num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False # The number of successors of n that are in T_1^{out} is equal to the # number of successors of m that are in T_2^{out}. num1 = 0 for successor in self.G1[G1_node]: if (successor in self.out_1) and (successor not in self.core_1): num1 += 1 num2 = 0 for successor in self.G2[G2_node]: if (successor in self.out_2) and (successor not in self.core_2): num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False ### Look ahead 2 # R_new # The number of predecessors of n that are neither in the core_1 nor # T_1^{in} nor T_1^{out} is equal to the number of predecessors of m # that are neither in core_2 nor T_2^{in} nor T_2^{out}. 
num1 = 0 for predecessor in self.G1.pred[G1_node]: if (predecessor not in self.in_1) and (predecessor not in self.out_1): num1 += 1 num2 = 0 for predecessor in self.G2.pred[G2_node]: if (predecessor not in self.in_2) and (predecessor not in self.out_2): num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False # The number of successors of n that are neither in the core_1 nor # T_1^{in} nor T_1^{out} is equal to the number of successors of m # that are neither in core_2 nor T_2^{in} nor T_2^{out}. num1 = 0 for successor in self.G1[G1_node]: if (successor not in self.in_1) and (successor not in self.out_1): num1 += 1 num2 = 0 for successor in self.G2[G2_node]: if (successor not in self.in_2) and (successor not in self.out_2): num2 += 1 if self.test == 'graph': if not (num1 == num2): return False else: # self.test == 'subgraph' if not (num1 >= num2): return False # Otherwise, this node pair is syntactically feasible! return True class GMState(object): """Internal representation of state for the GraphMatcher class. This class is used internally by the GraphMatcher class. It is used only to store state specific data. There will be at most G2.order() of these objects in memory at a time, due to the depth-first search strategy employed by the VF2 algorithm. """ def __init__(self, GM, G1_node=None, G2_node=None): """Initializes GMState object. Pass in the GraphMatcher to which this GMState belongs and the new node pair that will be added to the GraphMatcher's current isomorphism mapping. """ self.GM = GM # Initialize the last stored node pair. self.G1_node = None self.G2_node = None self.depth = len(GM.core_1) if G1_node is None or G2_node is None: # Then we reset the class variables GM.core_1 = {} GM.core_2 = {} GM.inout_1 = {} GM.inout_2 = {} # Watch out! G1_node == 0 should evaluate to True. if G1_node is not None and G2_node is not None: # Add the node pair to the isomorphism mapping. GM.core_1[G1_node] = G2_node GM.core_2[G2_node] = G1_node # Store the node that was added last. self.G1_node = G1_node self.G2_node = G2_node # Now we must update the other two vectors. # We will add only if it is not in there already! self.depth = len(GM.core_1) # First we add the new nodes... if G1_node not in GM.inout_1: GM.inout_1[G1_node] = self.depth if G2_node not in GM.inout_2: GM.inout_2[G2_node] = self.depth # Now we add every other node... # Updates for T_1^{inout} new_nodes = set([]) for node in GM.core_1: new_nodes.update([neighbor for neighbor in GM.G1[node] if neighbor not in GM.core_1]) for node in new_nodes: if node not in GM.inout_1: GM.inout_1[node] = self.depth # Updates for T_2^{inout} new_nodes = set([]) for node in GM.core_2: new_nodes.update([neighbor for neighbor in GM.G2[node] if neighbor not in GM.core_2]) for node in new_nodes: if node not in GM.inout_2: GM.inout_2[node] = self.depth def restore(self): """Deletes the GMState object and restores the class variables.""" # First we remove the node that was added from the core vectors. # Watch out! G1_node == 0 should evaluate to True. if self.G1_node is not None and self.G2_node is not None: del self.GM.core_1[self.G1_node] del self.GM.core_2[self.G2_node] # Now we revert the other two vectors. # Thus, we delete all entries which have this depth level. 
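        # (Entries stamped with this depth were added by this state and by no
        # earlier one, so removing them restores the parent state exactly.)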
for vector in (self.GM.inout_1, self.GM.inout_2): for node in list(vector.keys()): if vector[node] == self.depth: del vector[node] class DiGMState(object): """Internal representation of state for the DiGraphMatcher class. This class is used internally by the DiGraphMatcher class. It is used only to store state specific data. There will be at most G2.order() of these objects in memory at a time, due to the depth-first search strategy employed by the VF2 algorithm. """ def __init__(self, GM, G1_node=None, G2_node=None): """Initializes DiGMState object. Pass in the DiGraphMatcher to which this DiGMState belongs and the new node pair that will be added to the GraphMatcher's current isomorphism mapping. """ self.GM = GM # Initialize the last stored node pair. self.G1_node = None self.G2_node = None self.depth = len(GM.core_1) if G1_node is None or G2_node is None: # Then we reset the class variables GM.core_1 = {} GM.core_2 = {} GM.in_1 = {} GM.in_2 = {} GM.out_1 = {} GM.out_2 = {} # Watch out! G1_node == 0 should evaluate to True. if G1_node is not None and G2_node is not None: # Add the node pair to the isomorphism mapping. GM.core_1[G1_node] = G2_node GM.core_2[G2_node] = G1_node # Store the node that was added last. self.G1_node = G1_node self.G2_node = G2_node # Now we must update the other four vectors. # We will add only if it is not in there already! self.depth = len(GM.core_1) # First we add the new nodes... for vector in (GM.in_1, GM.out_1): if G1_node not in vector: vector[G1_node] = self.depth for vector in (GM.in_2, GM.out_2): if G2_node not in vector: vector[G2_node] = self.depth # Now we add every other node... # Updates for T_1^{in} new_nodes = set([]) for node in GM.core_1: new_nodes.update([predecessor for predecessor in GM.G1.predecessors(node) if predecessor not in GM.core_1]) for node in new_nodes: if node not in GM.in_1: GM.in_1[node] = self.depth # Updates for T_2^{in} new_nodes = set([]) for node in GM.core_2: new_nodes.update([predecessor for predecessor in GM.G2.predecessors(node) if predecessor not in GM.core_2]) for node in new_nodes: if node not in GM.in_2: GM.in_2[node] = self.depth # Updates for T_1^{out} new_nodes = set([]) for node in GM.core_1: new_nodes.update([successor for successor in GM.G1.successors(node) if successor not in GM.core_1]) for node in new_nodes: if node not in GM.out_1: GM.out_1[node] = self.depth # Updates for T_2^{out} new_nodes = set([]) for node in GM.core_2: new_nodes.update([successor for successor in GM.G2.successors(node) if successor not in GM.core_2]) for node in new_nodes: if node not in GM.out_2: GM.out_2[node] = self.depth def restore(self): """Deletes the DiGMState object and restores the class variables.""" # First we remove the node that was added from the core vectors. # Watch out! G1_node == 0 should evaluate to True. if self.G1_node is not None and self.G2_node is not None: del self.GM.core_1[self.G1_node] del self.GM.core_2[self.G2_node] # Now we revert the other four vectors. # Thus, we delete all entries which have this depth level. for vector in (self.GM.in_1, self.GM.in_2, self.GM.out_1, self.GM.out_2): for node in list(vector.keys()): if vector[node] == self.depth: del vector[node] networkx-1.8.1/networkx/algorithms/isomorphism/matchhelpers.py0000664000175000017500000002767412177456333024751 0ustar aricaric00000000000000"""Functions which help end users define customize node_match and edge_match functions to use during isomorphism checks. 
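For two graphs G1 and G2 built elsewhere, a typical pattern is
(an illustrative sketch)::

    import networkx as nx
    import networkx.algorithms.isomorphism as iso

    nm = iso.categorical_node_match('color', 'red')
    em = iso.numerical_edge_match('weight', 1.0)
    nx.is_isomorphic(G1, G2, node_match=nm, edge_match=em)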
""" from itertools import permutations import types import networkx as nx __all__ = ['categorical_node_match', 'categorical_edge_match', 'categorical_multiedge_match', 'numerical_node_match', 'numerical_edge_match', 'numerical_multiedge_match', 'generic_node_match', 'generic_edge_match', 'generic_multiedge_match', ] def copyfunc(f, name=None): """Returns a deepcopy of a function.""" try: return types.FunctionType(f.func_code, f.func_globals, name or f.name, f.func_defaults, f.func_closure) except AttributeError: return types.FunctionType(f.__code__, f.__globals__, name or f.name, f.__defaults__, f.__closure__) def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08): """Returns True if x and y are sufficiently close, elementwise. Parameters ---------- rtol : float The relative error tolerance. atol : float The absolute error tolerance. """ # assume finite weights, see numpy.allclose() for reference for xi, yi in zip(x,y): if not ( abs(xi-yi) <= atol + rtol * abs(yi) ): return False return True def close(x, y, rtol=1.0000000000000001e-05, atol=1e-08): """Returns True if x and y are sufficiently close. Parameters ---------- rtol : float The relative error tolerance. atol : float The absolute error tolerance. """ # assume finite weights, see numpy.allclose() for reference return abs(x-y) <= atol + rtol * abs(y) categorical_doc = """ Returns a comparison function for a categorical node attribute. The value(s) of the attr(s) must be hashable and comparable via the == operator since they are placed into a set([]) object. If the sets from G1 and G2 are the same, then the constructed function returns True. Parameters ---------- attr : string | list The categorical node attribute to compare, or a list of categorical node attributes to compare. default : value | list The default value for the categorical node attribute, or a list of default values for the categorical node attributes. Returns ------- match : function The customized, categorical `node_match` function. Examples -------- >>> import networkx.algorithms.isomorphism as iso >>> nm = iso.categorical_node_match('size', 1) >>> nm = iso.categorical_node_match(['color', 'size'], ['red', 2]) """ def categorical_node_match(attr, default): if nx.utils.is_string_like(attr): def match(data1, data2): return data1.get(attr, default) == data2.get(attr, default) else: attrs = list(zip(attr, default)) # Python 3 def match(data1, data2): values1 = set([data1.get(attr, d) for attr, d in attrs]) values2 = set([data2.get(attr, d) for attr, d in attrs]) return values1 == values2 return match categorical_edge_match = copyfunc(categorical_node_match, 'categorical_edge_match') def categorical_multiedge_match(attr, default): if nx.utils.is_string_like(attr): def match(datasets1, datasets2): values1 = set([data.get(attr, default) for data in datasets1.values()]) values2 = set([data.get(attr, default) for data in datasets2.values()]) return values1 == values2 else: attrs = list(zip(attr, default)) # Python 3 def match(datasets1, datasets2): values1 = set([]) for data1 in datasets1.values(): x = tuple( data1.get(attr, d) for attr, d in attrs ) values1.add(x) values2 = set([]) for data2 in datasets2.values(): x = tuple( data2.get(attr, d) for attr, d in attrs ) values2.add(x) return values1 == values2 return match # Docstrings for categorical functions. 
categorical_node_match.__doc__ = categorical_doc categorical_edge_match.__doc__ = categorical_doc.replace('node', 'edge') tmpdoc = categorical_doc.replace('node', 'edge') tmpdoc = tmpdoc.replace('categorical_edge_match', 'categorical_multiedge_match') categorical_multiedge_match.__doc__ = tmpdoc numerical_doc = """ Returns a comparison function for a numerical node attribute. The value(s) of the attr(s) must be numerical and sortable. If the sorted list of values from G1 and G2 are the same within some tolerance, then the constructed function returns True. Parameters ---------- attr : string | list The numerical node attribute to compare, or a list of numerical node attributes to compare. default : value | list The default value for the numerical node attribute, or a list of default values for the numerical node attributes. rtol : float The relative error tolerance. atol : float The absolute error tolerance. Returns ------- match : function The customized, numerical `node_match` function. Examples -------- >>> import networkx.algorithms.isomorphism as iso >>> nm = iso.numerical_node_match('weight', 1.0) >>> nm = iso.numerical_node_match(['weight', 'linewidth'], [.25, .5]) """ def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): if nx.utils.is_string_like(attr): def match(data1, data2): return close(data1.get(attr, default), data2.get(attr, default), rtol=rtol, atol=atol) else: attrs = list(zip(attr, default)) # Python 3 def match(data1, data2): values1 = [data1.get(attr, d) for attr, d in attrs] values2 = [data2.get(attr, d) for attr, d in attrs] return allclose(values1, values2, rtol=rtol, atol=atol) return match numerical_edge_match = copyfunc(numerical_node_match, 'numerical_edge_match') def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08): if nx.utils.is_string_like(attr): def match(datasets1, datasets2): values1 = sorted([data.get(attr, default) for data in datasets1.values()]) values2 = sorted([data.get(attr, default) for data in datasets2.values()]) return allclose(values1, values2, rtol=rtol, atol=atol) else: attrs = list(zip(attr, default)) # Python 3 def match(datasets1, datasets2): values1 = [] for data1 in datasets1.values(): x = tuple( data1.get(attr, d) for attr, d in attrs ) values1.append(x) values2 = [] for data2 in datasets2.values(): x = tuple( data2.get(attr, d) for attr, d in attrs ) values2.append(x) values1.sort() values2.sort() for xi, yi in zip(values1, values2): if not allclose(xi, yi, rtol=rtol, atol=atol): return False else: return True return match # Docstrings for numerical functions. numerical_node_match.__doc__ = numerical_doc numerical_edge_match.__doc__ = numerical_doc.replace('node', 'edge') tmpdoc = numerical_doc.replace('node', 'edge') tmpdoc = tmpdoc.replace('numerical_edge_match', 'numerical_multiedge_match') numerical_multiedge_match.__doc__ = tmpdoc generic_doc = """ Returns a comparison function for a generic attribute. The value(s) of the attr(s) are compared using the specified operators. If all the attributes are equal, then the constructed function returns True. Parameters ---------- attr : string | list The node attribute to compare, or a list of node attributes to compare. default : value | list The default value for the node attribute, or a list of default values for the node attributes. op : callable | list The operator to use when comparing attribute values, or a list of operators to use when comparing values for each attribute. 
Returns ------- match : function The customized, generic `node_match` function. Examples -------- >>> from operator import eq >>> from networkx.algorithms.isomorphism.matchhelpers import close >>> from networkx.algorithms.isomorphism import generic_node_match >>> nm = generic_node_match('weight', 1.0, close) >>> nm = generic_node_match('color', 'red', eq) >>> nm = generic_node_match(['weight', 'color'], [1.0, 'red'], [close, eq]) """ def generic_node_match(attr, default, op): if nx.utils.is_string_like(attr): def match(data1, data2): return op(data1.get(attr, default), data2.get(attr, default)) else: attrs = list(zip(attr, default, op)) # Python 3 def match(data1, data2): for attr, d, operator in attrs: if not operator(data1.get(attr, d), data2.get(attr, d)): return False else: return True return match generic_edge_match = copyfunc(generic_node_match, 'generic_edge_match') def generic_multiedge_match(attr, default, op): """Returns a comparison function for a generic attribute. The value(s) of the attr(s) are compared using the specified operators. If all the attributes are equal, then the constructed function returns True. Potentially, the constructed edge_match function can be slow since it must verify that no isomorphism exists between the multiedges before it returns False. Parameters ---------- attr : string | list The edge attribute to compare, or a list of node attributes to compare. default : value | list The default value for the edge attribute, or a list of default values for the dgeattributes. op : callable | list The operator to use when comparing attribute values, or a list of operators to use when comparing values for each attribute. Returns ------- match : function The customized, generic `edge_match` function. Examples -------- >>> from operator import eq >>> from networkx.algorithms.isomorphism.matchhelpers import close >>> from networkx.algorithms.isomorphism import generic_node_match >>> nm = generic_node_match('weight', 1.0, close) >>> nm = generic_node_match('color', 'red', eq) >>> nm = generic_node_match(['weight', 'color'], ... [1.0, 'red'], ... [close, eq]) ... """ # This is slow, but generic. # We must test every possible isomorphism between the edges. if nx.utils.is_string_like(attr): def match(datasets1, datasets2): values1 = [data.get(attr, default) for data in datasets1.values()] values2 = [data.get(attr, default) for data in datasets2.values()] for vals2 in permutations(values2): for xi, yi in zip(values1, vals2): if not op(xi, yi): # This is not an isomorphism, go to next permutation. break else: # Then we found an isomorphism. return True else: # Then there are no isomorphisms between the multiedges. return False else: attrs = list(zip(attr, default)) # Python 3 def match(datasets1, datasets2): values1 = [] for data1 in datasets1.values(): x = tuple( data1.get(attr, d) for attr, d in attrs ) values1.append(x) values2 = [] for data2 in datasets2.values(): x = tuple( data2.get(attr, d) for attr, d in attrs ) values2.append(x) for vals2 in permutations(values2): for xi, yi, operator in zip(values1, vals2, op): if not operator(xi, yi): return False else: return True return match # Docstrings for numerical functions. 
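# Note: generic_multiedge_match tries every permutation of one multiedge's
# parallel edges against the other's, so its cost grows factorially with the
# number of parallel edges between a node pair.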
generic_node_match.__doc__ = generic_doc generic_edge_match.__doc__ = generic_doc.replace('node', 'edge') networkx-1.8.1/networkx/algorithms/link_analysis/0000775000175000017500000000000012177457361022171 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/link_analysis/pagerank_alg.py0000664000175000017500000003111112177456333025151 0ustar aricaric00000000000000"""PageRank analysis of graph structure. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # NetworkX:http://networkx.lanl.gov/ import networkx as nx from networkx.exception import NetworkXError __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['pagerank','pagerank_numpy','pagerank_scipy','google_matrix'] def pagerank(G,alpha=0.85,personalization=None, max_iter=100,tol=1.0e-8,nstart=None,weight='weight'): """Return the PageRank of the nodes in the graph. PageRank computes a ranking of the nodes in the graph G based on the structure of the incoming links. It was originally designed as an algorithm to rank web pages. Parameters ----------- G : graph A NetworkX graph alpha : float, optional Damping parameter for PageRank, default=0.85 personalization: dict, optional The "personalization vector" consisting of a dictionary with a key for every graph node and nonzero personalization value for each node. max_iter : integer, optional Maximum number of iterations in power method eigenvalue solver. tol : float, optional Error tolerance used to check convergence in power method solver. nstart : dictionary, optional Starting value of PageRank iteration for each node. weight : key, optional Edge data key to use as weight. If None weights are set to 1. Returns ------- pagerank : dictionary Dictionary of nodes with PageRank as value Examples -------- >>> G=nx.DiGraph(nx.path_graph(4)) >>> pr=nx.pagerank(G,alpha=0.9) Notes ----- The eigenvector calculation is done by the power iteration method and has no guarantee of convergence. The iteration will stop after max_iter iterations or an error tolerance of number_of_nodes(G)*tol has been reached. The PageRank algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed and will execute on undirected graphs by converting each oriented edge in the directed graph to two edges. See Also -------- pagerank_numpy, pagerank_scipy, google_matrix References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry, The PageRank citation ranking: Bringing order to the Web. 
1999 http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf """ if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: raise Exception("pagerank() not defined for graphs with multiedges.") if len(G) == 0: return {} if not G.is_directed(): D=G.to_directed() else: D=G # create a copy in (right) stochastic form W=nx.stochastic_graph(D, weight=weight) scale=1.0/W.number_of_nodes() # choose fixed starting vector if not given if nstart is None: x=dict.fromkeys(W,scale) else: x=nstart # normalize starting vector to 1 s=1.0/sum(x.values()) for k in x: x[k]*=s # assign uniform personalization/teleportation vector if not given if personalization is None: p=dict.fromkeys(W,scale) else: p=personalization # normalize starting vector to 1 s=1.0/sum(p.values()) for k in p: p[k]*=s if set(p)!=set(G): raise NetworkXError('Personalization vector ' 'must have a value for every node') # "dangling" nodes, no links out from them out_degree=W.out_degree() dangle=[n for n in W if out_degree[n]==0.0] i=0 while True: # power iteration: make up to max_iter iterations xlast=x x=dict.fromkeys(xlast.keys(),0) danglesum=alpha*scale*sum(xlast[n] for n in dangle) for n in x: # this matrix multiply looks odd because it is # doing a left multiply x^T=xlast^T*W for nbr in W[n]: x[nbr]+=alpha*xlast[n]*W[n][nbr][weight] x[n]+=danglesum+(1.0-alpha)*p[n] # normalize vector s=1.0/sum(x.values()) for n in x: x[n]*=s # check convergence, l1 norm err=sum([abs(x[n]-xlast[n]) for n in x]) if err < tol: break if i>max_iter: raise NetworkXError('pagerank: power iteration failed to converge ' 'in %d iterations.'%(i-1)) i+=1 return x def google_matrix(G, alpha=0.85, personalization=None, nodelist=None, weight='weight'): """Return the Google matrix of the graph. Parameters ----------- G : graph A NetworkX graph alpha : float The damping factor personalization: dict, optional The "personalization vector" consisting of a dictionary with a key for every graph node and nonzero personalization value for each node. nodelist : list, optional The rows and columns are ordered according to the nodes in nodelist. If nodelist is None, then the ordering is produced by G.nodes(). weight : key, optional Edge data key to use as weight. If None weights are set to 1. Returns ------- A : NumPy matrix Google matrix of the graph See Also -------- pagerank, pagerank_numpy, pagerank_scipy """ try: import numpy as np except ImportError: raise ImportError(\ "google_matrix() requires NumPy: http://scipy.org/") # choose ordering in matrix if personalization is None: # use G.nodes() ordering nodelist=G.nodes() else: # use personalization "vector" ordering nodelist=personalization.keys() if set(nodelist)!=set(G): raise NetworkXError('Personalization vector dictionary' 'must have a value for every node') M=nx.to_numpy_matrix(G,nodelist=nodelist,weight=weight) (n,m)=M.shape # should be square if n == 0: return M # add constant to dangling nodes' row dangling=np.where(M.sum(axis=1)==0) for d in dangling[0]: M[d]=1.0/n # normalize M=M/M.sum(axis=1) # add "teleportation"/personalization e=np.ones((n)) if personalization is not None: v=np.array(list(personalization.values()),dtype=float) else: v=e v=v/v.sum() P=alpha*M+(1-alpha)*np.outer(e,v) return P def pagerank_numpy(G, alpha=0.85, personalization=None, weight='weight'): """Return the PageRank of the nodes in the graph. PageRank computes a ranking of the nodes in the graph G based on the structure of the incoming links. It was originally designed as an algorithm to rank web pages. 
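    In matrix terms, the PageRank vector is the dominant left eigenvector of
    the Google matrix of the graph, normalized to sum to one; this routine
    obtains it with NumPy's dense eigenvalue solver. A sketch of the
    equivalent calculation (assuming ``numpy`` has been imported as ``np``)::

        M = nx.google_matrix(G, alpha=0.85)
        eigenvalues, eigenvectors = np.linalg.eig(M.T)
        # take the eigenvector for the largest eigenvalue and normalize it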
Parameters ----------- G : graph A NetworkX graph alpha : float, optional Damping parameter for PageRank, default=0.85 personalization: dict, optional The "personalization vector" consisting of a dictionary with a key for every graph node and nonzero personalization value for each node. weight : key, optional Edge data key to use as weight. If None weights are set to 1. Returns ------- pagerank : dictionary Dictionary of nodes with PageRank as value Examples -------- >>> G=nx.DiGraph(nx.path_graph(4)) >>> pr=nx.pagerank_numpy(G,alpha=0.9) Notes ----- The eigenvector calculation uses NumPy's interface to the LAPACK eigenvalue solvers. This will be the fastest and most accurate for small graphs. This implementation works with Multi(Di)Graphs. See Also -------- pagerank, pagerank_scipy, google_matrix References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry, The PageRank citation ranking: Bringing order to the Web. 1999 http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf """ try: import numpy as np except ImportError: raise ImportError("pagerank_numpy() requires NumPy: http://scipy.org/") if len(G) == 0: return {} # choose ordering in matrix if personalization is None: # use G.nodes() ordering nodelist=G.nodes() else: # use personalization "vector" ordering nodelist=personalization.keys() M=google_matrix(G, alpha, personalization=personalization, nodelist=nodelist, weight=weight) # use numpy LAPACK solver eigenvalues,eigenvectors=np.linalg.eig(M.T) ind=eigenvalues.argsort() # eigenvector of largest eigenvalue at ind[-1], normalized largest=np.array(eigenvectors[:,ind[-1]]).flatten().real norm=float(largest.sum()) centrality=dict(zip(nodelist,map(float,largest/norm))) return centrality def pagerank_scipy(G, alpha=0.85, personalization=None, max_iter=100, tol=1.0e-6, weight='weight'): """Return the PageRank of the nodes in the graph. PageRank computes a ranking of the nodes in the graph G based on the structure of the incoming links. It was originally designed as an algorithm to rank web pages. Parameters ----------- G : graph A NetworkX graph alpha : float, optional Damping parameter for PageRank, default=0.85 personalization: dict, optional The "personalization vector" consisting of a dictionary with a key for every graph node and nonzero personalization value for each node. max_iter : integer, optional Maximum number of iterations in power method eigenvalue solver. tol : float, optional Error tolerance used to check convergence in power method solver. weight : key, optional Edge data key to use as weight. If None weights are set to 1. Returns ------- pagerank : dictionary Dictionary of nodes with PageRank as value Examples -------- >>> G=nx.DiGraph(nx.path_graph(4)) >>> pr=nx.pagerank_scipy(G,alpha=0.9) Notes ----- The eigenvector calculation uses power iteration with a SciPy sparse matrix representation. See Also -------- pagerank, pagerank_numpy, google_matrix References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Page, Lawrence; Brin, Sergey; Motwani, Rajeev and Winograd, Terry, The PageRank citation ranking: Bringing order to the Web. 
1999 http://dbpubs.stanford.edu:8090/pub/showDoc.Fulltext?lang=en&doc=1999-66&format=pdf """ try: import scipy.sparse except ImportError: raise ImportError("pagerank_scipy() requires SciPy: http://scipy.org/") if len(G) == 0: return {} # choose ordering in matrix if personalization is None: # use G.nodes() ordering nodelist=G.nodes() else: # use personalization "vector" ordering nodelist=personalization.keys() M=nx.to_scipy_sparse_matrix(G,nodelist=nodelist,weight=weight,dtype='f') (n,m)=M.shape # should be square S=scipy.array(M.sum(axis=1)).flatten() # for i, j, v in zip( *scipy.sparse.find(M) ): # M[i,j] = v / S[i] S[S>0] = 1.0 / S[S>0] Q = scipy.sparse.spdiags(S.T, 0, *M.shape, format='csr') M = Q * M x=scipy.ones((n))/n # initial guess dangle=scipy.array(scipy.where(M.sum(axis=1)==0,1.0/n,0)).flatten() # add "teleportation"/personalization if personalization is not None: v=scipy.array(list(personalization.values()),dtype=float) v=v/v.sum() else: v=x i=0 while i <= max_iter: # power iteration: make up to max_iter iterations xlast=x x=alpha*(x*M+scipy.dot(dangle,xlast))+(1-alpha)*v x=x/x.sum() # check convergence, l1 norm err=scipy.absolute(x-xlast).sum() if err < n*tol: return dict(zip(nodelist,map(float,x))) i+=1 raise NetworkXError('pagerank_scipy: power iteration failed to converge' 'in %d iterations.'%(i+1)) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/algorithms/link_analysis/__init__.py0000664000175000017500000000016612177456333024303 0ustar aricaric00000000000000from networkx.algorithms.link_analysis.pagerank_alg import * from networkx.algorithms.link_analysis.hits_alg import * networkx-1.8.1/networkx/algorithms/link_analysis/tests/0000775000175000017500000000000012177457361023333 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/link_analysis/tests/test_pagerank.py0000664000175000017500000000761612177456333026544 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest from nose.plugins.attrib import attr import random import networkx # Example from # A. Langville and C. Meyer, "A survey of eigenvector methods of web # information retrieval." 
http://citeseer.ist.psu.edu/713792.html class TestPageRank: def setUp(self): G=networkx.DiGraph() edges=[(1,2),(1,3),\ (3,1),(3,2),(3,5),\ (4,5),(4,6),\ (5,4),(5,6),\ (6,4)] G.add_edges_from(edges) self.G=G self.G.pagerank=dict(zip(G, [0.03721197,0.05395735,0.04150565, 0.37508082,0.20599833, 0.28624589])) def test_pagerank(self): G=self.G p=networkx.pagerank(G,alpha=0.9,tol=1.e-08) for n in G: assert_almost_equal(p[n],G.pagerank[n],places=4) nstart = dict((n,random.random()) for n in G) p=networkx.pagerank(G,alpha=0.9,tol=1.e-08, nstart=nstart) for n in G: assert_almost_equal(p[n],G.pagerank[n],places=4) assert_raises(networkx.NetworkXError,networkx.pagerank,G, max_iter=0) @attr('numpy') def test_numpy_pagerank(self): try: import numpy except ImportError: raise SkipTest('numpy not available.') G=self.G p=networkx.pagerank_numpy(G,alpha=0.9) for n in G: assert_almost_equal(p[n],G.pagerank[n],places=4) personalize = dict((n,random.random()) for n in G) p=networkx.pagerank_numpy(G,alpha=0.9, personalization=personalize) @attr('numpy') def test_google_matrix(self): try: import numpy.linalg except ImportError: raise SkipTest('numpy not available.') G=self.G M=networkx.google_matrix(G,alpha=0.9) e,ev=numpy.linalg.eig(M.T) p=numpy.array(ev[:,0]/ev[:,0].sum())[:,0] for (a,b) in zip(p,self.G.pagerank.values()): assert_almost_equal(a,b) personalize = dict((n,random.random()) for n in G) M=networkx.google_matrix(G,alpha=0.9, personalization=personalize) _ = personalize.pop(1) assert_raises(networkx.NetworkXError,networkx.google_matrix,G, personalization=personalize) def test_scipy_pagerank(self): G=self.G try: import scipy except ImportError: raise SkipTest('scipy not available.') p=networkx.pagerank_scipy(G,alpha=0.9,tol=1.e-08) for n in G: assert_almost_equal(p[n],G.pagerank[n],places=4) personalize = dict((n,random.random()) for n in G) p=networkx.pagerank_scipy(G,alpha=0.9,tol=1.e-08, personalization=personalize) assert_raises(networkx.NetworkXError,networkx.pagerank_scipy,G, max_iter=0) def test_personalization(self): G=networkx.complete_graph(4) personalize={0:1,1:1,2:4,3:4} answer={0:0.1,1:0.1,2:0.4,3:0.4} p=networkx.pagerank(G,alpha=0.0,personalization=personalize) for n in G: assert_almost_equal(p[n],answer[n],places=4) _ = personalize.pop(0) assert_raises(networkx.NetworkXError,networkx.pagerank,G, personalization=personalize) @attr('numpy') def test_empty(self): try: import numpy except ImportError: raise SkipTest('numpy not available.') G=networkx.Graph() assert_equal(networkx.pagerank(G),{}) assert_equal(networkx.pagerank_numpy(G),{}) assert_equal(networkx.google_matrix(G).shape,(0,0)) def test_empty_scipy(self): try: import scipy except ImportError: raise SkipTest('scipy not available.') G=networkx.Graph() assert_equal(networkx.pagerank_scipy(G),{}) networkx-1.8.1/networkx/algorithms/link_analysis/tests/test_hits.py0000664000175000017500000000476012177456333025720 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest from nose.plugins.attrib import attr import networkx # Example from # A. Langville and C. Meyer, "A survey of eigenvector methods of web # information retrieval." 
http://citeseer.ist.psu.edu/713792.html class TestHITS: def setUp(self): G=networkx.DiGraph() edges=[(1,3),(1,5),\ (2,1),\ (3,5),\ (5,4),(5,3),\ (6,5)] G.add_edges_from(edges,weight=1) self.G=G self.G.a=dict(zip(G,[0.000000, 0.000000, 0.366025, 0.133975, 0.500000, 0.000000])) self.G.h=dict(zip(G,[ 0.366025, 0.000000, 0.211325, 0.000000, 0.211325, 0.211325])) def test_hits(self): G=self.G h,a=networkx.hits(G,tol=1.e-08) for n in G: assert_almost_equal(h[n],G.h[n],places=4) for n in G: assert_almost_equal(a[n],G.a[n],places=4) def test_hits_nstart(self): G = self.G nstart = dict([(i, 1./2) for i in G]) h, a = networkx.hits(G, nstart = nstart) @attr('numpy') def test_hits_numpy(self): try: import numpy as np except ImportError: raise SkipTest('NumPy not available.') G=self.G h,a=networkx.hits_numpy(G) for n in G: assert_almost_equal(h[n],G.h[n],places=4) for n in G: assert_almost_equal(a[n],G.a[n],places=4) def test_hits_scipy(self): try: import scipy as sp except ImportError: raise SkipTest('SciPy not available.') G=self.G h,a=networkx.hits_scipy(G,tol=1.e-08) for n in G: assert_almost_equal(h[n],G.h[n],places=4) for n in G: assert_almost_equal(a[n],G.a[n],places=4) @attr('numpy') def test_empty(self): try: import numpy except ImportError: raise SkipTest('numpy not available.') G=networkx.Graph() assert_equal(networkx.hits(G),({},{})) assert_equal(networkx.hits_numpy(G),({},{})) assert_equal(networkx.authority_matrix(G).shape,(0,0)) assert_equal(networkx.hub_matrix(G).shape,(0,0)) def test_empty_scipy(self): try: import scipy except ImportError: raise SkipTest('scipy not available.') G=networkx.Graph() assert_equal(networkx.hits_scipy(G),({},{})) networkx-1.8.1/networkx/algorithms/link_analysis/hits_alg.py0000664000175000017500000002233612177456333024341 0ustar aricaric00000000000000"""Hubs and authorities analysis of graph structure. """ # Copyright (C) 2008-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # NetworkX:http://networkx.lanl.gov/ import networkx as nx from networkx.exception import NetworkXError __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['hits','hits_numpy','hits_scipy','authority_matrix','hub_matrix'] def hits(G,max_iter=100,tol=1.0e-8,nstart=None,normalized=True): """Return HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. Hubs estimates the node value based on outgoing links. Parameters ---------- G : graph A NetworkX graph max_iter : interger, optional Maximum number of iterations in power method. tol : float, optional Error tolerance used to check convergence in power method iteration. nstart : dictionary, optional Starting value of each node for power method iteration. normalized : bool (default=True) Normalize results by the sum of all of the values. Returns ------- (hubs,authorities) : two-tuple of dictionaries Two dictionaries keyed by node containing the hub and authority values. Examples -------- >>> G=nx.path_graph(4) >>> h,a=nx.hits(G) Notes ----- The eigenvector calculation is done by the power iteration method and has no guarantee of convergence. The iteration will stop after max_iter iterations or an error tolerance of number_of_nodes(G)*tol has been reached. The HITS algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed and will execute on undirected graphs. References ---------- .. [1] A. Langville and C. 
Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Jon Kleinberg, Authoritative sources in a hyperlinked environment Journal of the ACM 46 (5): 604-32, 1999. doi:10.1145/324133.324140. http://www.cs.cornell.edu/home/kleinber/auth.pdf. """ if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: raise Exception("hits() not defined for graphs with multiedges.") if len(G) == 0: return {},{} # choose fixed starting vector if not given if nstart is None: h=dict.fromkeys(G,1.0/G.number_of_nodes()) else: h=nstart # normalize starting vector s=1.0/sum(h.values()) for k in h: h[k]*=s i=0 while True: # power iteration: make up to max_iter iterations hlast=h h=dict.fromkeys(hlast.keys(),0) a=dict.fromkeys(hlast.keys(),0) # this "matrix multiply" looks odd because it is # doing a left multiply a^T=hlast^T*G for n in h: for nbr in G[n]: a[nbr]+=hlast[n]*G[n][nbr].get('weight',1) # now multiply h=Ga for n in h: for nbr in G[n]: h[n]+=a[nbr]*G[n][nbr].get('weight',1) # normalize vector s=1.0/max(h.values()) for n in h: h[n]*=s # normalize vector s=1.0/max(a.values()) for n in a: a[n]*=s # check convergence, l1 norm err=sum([abs(h[n]-hlast[n]) for n in h]) if err < tol: break if i>max_iter: raise NetworkXError(\ "HITS: power iteration failed to converge in %d iterations."%(i+1)) i+=1 if normalized: s = 1.0/sum(a.values()) for n in a: a[n] *= s s = 1.0/sum(h.values()) for n in h: h[n] *= s return h,a def authority_matrix(G,nodelist=None): """Return the HITS authority matrix.""" M=nx.to_numpy_matrix(G,nodelist=nodelist) return M.T*M def hub_matrix(G,nodelist=None): """Return the HITS hub matrix.""" M=nx.to_numpy_matrix(G,nodelist=nodelist) return M*M.T def hits_numpy(G,normalized=True): """Return HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. Hubs estimates the node value based on outgoing links. Parameters ----------- G : graph A NetworkX graph normalized : bool (default=True) Normalize results by the sum of all of the values. Returns ------- (hubs,authorities) : two-tuple of dictionaries Two dictionaries keyed by node containing the hub and authority values. Examples -------- >>> G=nx.path_graph(4) >>> h,a=nx.hits(G) Notes ----- The eigenvector calculation uses NumPy's interface to LAPACK. The HITS algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed and will execute on undirected graphs. References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Jon Kleinberg, Authoritative sources in a hyperlinked environment Journal of the ACM 46 (5): 604-32, 1999. doi:10.1145/324133.324140. http://www.cs.cornell.edu/home/kleinber/auth.pdf. 
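The hub and authority vectors computed here are the principal eigenvectors
of the hub matrix ``M*M.T`` and the authority matrix ``M.T*M``, where ``M``
is the adjacency matrix of ``G``.  A rough sketch of that computation with
plain NumPy (illustrative only; the variable names are made up and the
implementation below also handles node ordering and normalization)::

    import numpy as np
    import networkx as nx

    G = nx.path_graph(4)
    M = nx.to_numpy_matrix(G)
    e, ev = np.linalg.eig(M * M.T)                  # spectrum of the hub matrix
    h = np.array(ev[:, e.argsort()[-1]]).flatten()  # principal eigenvector
    hubs = dict(zip(G.nodes(), map(float, h / h.sum())))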
""" try: import numpy as np except ImportError: raise ImportError(\ "hits_numpy() requires NumPy: http://scipy.org/") if len(G) == 0: return {},{} H=nx.hub_matrix(G,G.nodes()) e,ev=np.linalg.eig(H) m=e.argsort()[-1] # index of maximum eigenvalue h=np.array(ev[:,m]).flatten() A=nx.authority_matrix(G,G.nodes()) e,ev=np.linalg.eig(A) m=e.argsort()[-1] # index of maximum eigenvalue a=np.array(ev[:,m]).flatten() if normalized: h = h/h.sum() a = a/a.sum() else: h = h/h.max() a = a/a.max() hubs=dict(zip(G.nodes(),map(float,h))) authorities=dict(zip(G.nodes(),map(float,a))) return hubs,authorities def hits_scipy(G,max_iter=100,tol=1.0e-6,normalized=True): """Return HITS hubs and authorities values for nodes. The HITS algorithm computes two numbers for a node. Authorities estimates the node value based on the incoming links. Hubs estimates the node value based on outgoing links. Parameters ----------- G : graph A NetworkX graph max_iter : interger, optional Maximum number of iterations in power method. tol : float, optional Error tolerance used to check convergence in power method iteration. nstart : dictionary, optional Starting value of each node for power method iteration. normalized : bool (default=True) Normalize results by the sum of all of the values. Returns ------- (hubs,authorities) : two-tuple of dictionaries Two dictionaries keyed by node containing the hub and authority values. Examples -------- >>> G=nx.path_graph(4) >>> h,a=nx.hits(G) Notes ----- This implementation uses SciPy sparse matrices. The eigenvector calculation is done by the power iteration method and has no guarantee of convergence. The iteration will stop after max_iter iterations or an error tolerance of number_of_nodes(G)*tol has been reached. The HITS algorithm was designed for directed graphs but this algorithm does not check if the input graph is directed and will execute on undirected graphs. References ---------- .. [1] A. Langville and C. Meyer, "A survey of eigenvector methods of web information retrieval." http://citeseer.ist.psu.edu/713792.html .. [2] Jon Kleinberg, Authoritative sources in a hyperlinked environment Journal of the ACM 46 (5): 604-632, 1999. doi:10.1145/324133.324140. http://www.cs.cornell.edu/home/kleinber/auth.pdf. """ try: import scipy.sparse import numpy as np except ImportError: raise ImportError(\ "hits_scipy() requires SciPy: http://scipy.org/") if len(G) == 0: return {},{} M=nx.to_scipy_sparse_matrix(G,nodelist=G.nodes()) (n,m)=M.shape # should be square A=M.T*M # authority matrix x=scipy.ones((n,1))/n # initial guess # power iteration on authority matrix i=0 while True: xlast=x x=A*x x=x/x.max() # check convergence, l1 norm err=scipy.absolute(x-xlast).sum() if err < tol: break if i>max_iter: raise NetworkXError(\ "HITS: power iteration failed to converge in %d iterations."%(i+1)) i+=1 a=np.asarray(x).flatten() # h=M*a h=np.asarray(M*a).flatten() if normalized: h = h/h.sum() a = a/a.sum() hubs=dict(zip(G.nodes(),map(float,h))) authorities=dict(zip(G.nodes(),map(float,a))) return hubs,authorities # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/algorithms/euler.py0000664000175000017500000000672712177456333021031 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Eulerian circuits and graphs. 
""" import networkx as nx __author__ = """\n""".join(['Nima Mohammadi (nima.irt[AT]gmail.com)', 'Aric Hagberg ']) # Copyright (C) 2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['is_eulerian', 'eulerian_circuit'] def is_eulerian(G): """Return True if G is an Eulerian graph, False otherwise. An Eulerian graph is a graph with an Eulerian circuit. Parameters ---------- G : graph A NetworkX Graph Examples -------- >>> nx.is_eulerian(nx.DiGraph({0:[3], 1:[2], 2:[3], 3:[0, 1]})) True >>> nx.is_eulerian(nx.complete_graph(5)) True >>> nx.is_eulerian(nx.petersen_graph()) False Notes ----- This implementation requires the graph to be connected (or strongly connected for directed graphs). """ if G.is_directed(): # Every node must have equal in degree and out degree for n in G.nodes_iter(): if G.in_degree(n) != G.out_degree(n): return False # Must be strongly connected if not nx.is_strongly_connected(G): return False else: # An undirected Eulerian graph has no vertices of odd degrees for v,d in G.degree_iter(): if d % 2 != 0: return False # Must be connected if not nx.is_connected(G): return False return True def eulerian_circuit(G, source=None): """Return the edges of an Eulerian circuit in G. An Eulerian circuit is a path that crosses every edge in G exactly once and finishes at the starting node. Parameters ---------- G : graph A NetworkX Graph source : node, optional Starting node for circuit. Returns ------- edges : generator A generator that produces edges in the Eulerian circuit. Raises ------ NetworkXError If the graph is not Eulerian. See Also -------- is_eulerian Notes ----- Uses Fleury's algorithm [1]_,[2]_ References ---------- .. [1] Fleury, "Deux problemes de geometrie de situation", Journal de mathematiques elementaires (1883), 257-261. .. 
[2] http://en.wikipedia.org/wiki/Eulerian_path Examples -------- >>> G=nx.complete_graph(3) >>> list(nx.eulerian_circuit(G)) [(0, 1), (1, 2), (2, 0)] >>> list(nx.eulerian_circuit(G,source=1)) [(1, 0), (0, 2), (2, 1)] >>> [u for u,v in nx.eulerian_circuit(G)] # nodes in circuit [0, 1, 2] """ if not is_eulerian(G): raise nx.NetworkXError("G is not Eulerian.") g = G.__class__(G) # copy graph structure (not attributes) # set starting node if source is None: v = next(g.nodes_iter()) else: v = source while g.size() > 0: n = v # sort nbrs here to provide stable ordering of alternate cycles nbrs = sorted([v for u,v in g.edges(n)]) for v in nbrs: g.remove_edge(n,v) bridge = not nx.is_connected(g.to_undirected()) if bridge: g.add_edge(n,v) # add this edge back and try another else: break # this edge is good, break the for loop if bridge: g.remove_edge(n,v) g.remove_node(n) yield (n,v) networkx-1.8.1/networkx/algorithms/community/0000775000175000017500000000000012177457361021355 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/community/__init__.py0000664000175000017500000000006412177456333023464 0ustar aricaric00000000000000from networkx.algorithms.community.kclique import * networkx-1.8.1/networkx/algorithms/community/tests/0000775000175000017500000000000012177457361022517 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/community/tests/test_kclique.py0000664000175000017500000000415012177456333025563 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from itertools import combinations from networkx import k_clique_communities def test_overlaping_K5(): G = nx.Graph() G.add_edges_from(combinations(range(5), 2)) # Add a five clique G.add_edges_from(combinations(range(2,7), 2)) # Add another five clique c = list(nx.k_clique_communities(G, 4)) assert_equal(c,[frozenset([0, 1, 2, 3, 4, 5, 6])]) c= list(nx.k_clique_communities(G, 5)) assert_equal(set(c),set([frozenset([0,1,2,3,4]),frozenset([2,3,4,5,6])])) def test_isolated_K5(): G = nx.Graph() G.add_edges_from(combinations(range(0,5), 2)) # Add a five clique G.add_edges_from(combinations(range(5,10), 2)) # Add another five clique c= list(nx.k_clique_communities(G, 5)) assert_equal(set(c),set([frozenset([0,1,2,3,4]),frozenset([5,6,7,8,9])])) def test_zachary(): z = nx.karate_club_graph() # clique percolation with k=2 is just connected components zachary_k2_ground_truth = set([frozenset(z.nodes())]) zachary_k3_ground_truth = set([frozenset([0, 1, 2, 3, 7, 8, 12, 13, 14, 15, 17, 18, 19, 20, 21, 22, 23, 26, 27, 28, 29, 30, 31, 32, 33]), frozenset([0, 4, 5, 6, 10, 16]), frozenset([24, 25, 31])]) zachary_k4_ground_truth = set([frozenset([0, 1, 2, 3, 7, 13]), frozenset([8, 32, 30, 33]), frozenset([32, 33, 29, 23])]) zachary_k5_ground_truth = set([frozenset([0, 1, 2, 3, 7, 13])]) zachary_k6_ground_truth = set([]) assert set(k_clique_communities(z, 2)) == zachary_k2_ground_truth assert set(k_clique_communities(z, 3)) == zachary_k3_ground_truth assert set(k_clique_communities(z, 4)) == zachary_k4_ground_truth assert set(k_clique_communities(z, 5)) == zachary_k5_ground_truth assert set(k_clique_communities(z, 6)) == zachary_k6_ground_truth @raises(nx.NetworkXError) def test_bad_k(): c = list(k_clique_communities(nx.Graph(),1)) networkx-1.8.1/networkx/algorithms/community/kclique.py0000664000175000017500000000524012177456333023363 0ustar aricaric00000000000000#-*- coding: utf-8 -*- # Copyright (C) 2011 by # Conrad Lee # Aric Hagberg # All rights reserved. # BSD license. 
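# Overview of the method implemented below (informal): every maximal clique
# with at least k nodes becomes a node of an auxiliary "clique graph", two
# such cliques are joined whenever they share k-1 nodes, and each connected
# component of that auxiliary graph yields one k-clique community (the union
# of its cliques).  The overlap test at the heart of the percolation step is
# simply, e.g. (node labels made up for illustration):
#
#     c1, c2 = frozenset([1, 2, 3, 4]), frozenset([2, 3, 4, 5])  # two 4-cliques
#     len(c1 & c2) >= 4 - 1                                      # True, so they percolate for k=4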
from collections import defaultdict import networkx as nx __author__ = """\n""".join(['Conrad Lee ', 'Aric Hagberg ']) __all__ = ['k_clique_communities'] def k_clique_communities(G, k, cliques=None): """Find k-clique communities in graph using the percolation method. A k-clique community is the union of all cliques of size k that can be reached through adjacent (sharing k-1 nodes) k-cliques. Parameters ---------- G : NetworkX graph k : int Size of smallest clique cliques: list or generator Precomputed cliques (use networkx.find_cliques(G)) Returns ------- Yields sets of nodes, one for each k-clique community. Examples -------- >>> G = nx.complete_graph(5) >>> K5 = nx.convert_node_labels_to_integers(G,first_label=2) >>> G.add_edges_from(K5.edges()) >>> c = list(nx.k_clique_communities(G, 4)) >>> list(c[0]) [0, 1, 2, 3, 4, 5, 6] >>> list(nx.k_clique_communities(G, 6)) [] References ---------- .. [1] Gergely Palla, Imre Derényi, Illés Farkas1, and Tamás Vicsek, Uncovering the overlapping community structure of complex networks in nature and society Nature 435, 814-818, 2005, doi:10.1038/nature03607 """ if k < 2: raise nx.NetworkXError("k=%d, k must be greater than 1."%k) if cliques is None: cliques = nx.find_cliques(G) cliques = [frozenset(c) for c in cliques if len(c) >= k] # First index which nodes are in which cliques membership_dict = defaultdict(list) for clique in cliques: for node in clique: membership_dict[node].append(clique) # For each clique, see which adjacent cliques percolate perc_graph = nx.Graph() perc_graph.add_nodes_from(cliques) for clique in cliques: for adj_clique in _get_adjacent_cliques(clique, membership_dict): if len(clique.intersection(adj_clique)) >= (k - 1): perc_graph.add_edge(clique, adj_clique) # Connected components of clique graph with perc edges # are the percolated cliques for component in nx.connected_components(perc_graph): yield(frozenset.union(*component)) def _get_adjacent_cliques(clique, membership_dict): adjacent_cliques = set() for n in clique: for adj_clique in membership_dict[n]: if clique != adj_clique: adjacent_cliques.add(adj_clique) return adjacent_cliques networkx-1.8.1/networkx/algorithms/mis.py0000664000175000017500000000466112177456333020500 0ustar aricaric00000000000000# -*- coding: utf-8 -*- # $Id: maximalIndependentSet.py 576 2011-03-01 05:50:34Z lleeoo $ """ Algorithm to find a maximal (not maximum) independent set. """ # Leo Lopes # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = "\n".join(["Leo Lopes ", "Loïc Séguin-C. "]) __all__ = ['maximal_independent_set'] import random import networkx as nx def maximal_independent_set(G, nodes=None): """Return a random maximal independent set guaranteed to contain a given set of nodes. An independent set is a set of nodes such that the subgraph of G induced by these nodes contains no edges. A maximal independent set is an independent set such that it is not possible to add a new node and still get an independent set. Parameters ---------- G : NetworkX graph nodes : list or iterable Nodes that must be part of the independent set. This set of nodes must be independent. Returns ------- indep_nodes : list List of nodes that are part of a maximal independent set. Raises ------ NetworkXUnfeasible If the nodes in the provided list are not part of the graph or do not form an independent set, an exception is raised. 
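The procedure is greedy and randomized: starting from the required nodes,
it repeatedly selects a random node that is not adjacent to anything chosen
so far, until no candidate remains.  A rough sketch of that loop (a
simplification of the implementation below; the names are illustrative)::

    import random
    import networkx as nx

    G = nx.path_graph(5)
    chosen = {0}                                  # required seed node(s)
    candidates = set(G) - chosen - set(G[0])
    while candidates:
        v = random.choice(list(candidates))
        chosen.add(v)
        candidates -= set(G[v]) | {v}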
Examples -------- >>> G = nx.path_graph(5) >>> nx.maximal_independent_set(G) # doctest: +SKIP [4, 0, 2] >>> nx.maximal_independent_set(G, [1]) # doctest: +SKIP [1, 3] Notes ------ This algorithm does not solve the maximum independent set problem. """ if not nodes: nodes = set([random.choice(G.nodes())]) else: nodes = set(nodes) if not nodes.issubset(G): raise nx.NetworkXUnfeasible( "%s is not a subset of the nodes of G" % nodes) neighbors = set.union(*[set(G.neighbors(v)) for v in nodes]) if set.intersection(neighbors, nodes): raise nx.NetworkXUnfeasible( "%s is not an independent set of G" % nodes) indep_nodes = list(nodes) available_nodes = set(G.nodes()).difference(neighbors.union(nodes)) while available_nodes: node = random.choice(list(available_nodes)) indep_nodes.append(node) available_nodes.difference_update(G.neighbors(node) + [node]) return indep_nodes networkx-1.8.1/networkx/algorithms/isolate.py0000664000175000017500000000320512177456333021341 0ustar aricaric00000000000000# encoding: utf-8 """ Functions for identifying isolate (degree zero) nodes. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = """\n""".join(['Drew Conway ', 'Aric Hagberg ']) __all__=['is_isolate','isolates'] def is_isolate(G,n): """Determine of node n is an isolate (degree zero). Parameters ---------- G : graph A networkx graph n : node A node in G Returns ------- isolate : bool True if n has no neighbors, False otherwise. Examples -------- >>> G=nx.Graph() >>> G.add_edge(1,2) >>> G.add_node(3) >>> nx.is_isolate(G,2) False >>> nx.is_isolate(G,3) True """ return G.degree(n)==0 def isolates(G): """Return list of isolates in the graph. Isolates are nodes with no neighbors (degree zero). Parameters ---------- G : graph A networkx graph Returns ------- isolates : list List of isolate nodes. 
Examples -------- >>> G = nx.Graph() >>> G.add_edge(1,2) >>> G.add_node(3) >>> nx.isolates(G) [3] To remove all isolates in the graph use >>> G.remove_nodes_from(nx.isolates(G)) >>> G.nodes() [1, 2] For digraphs isolates have zero in-degree and zero out_degre >>> G = nx.DiGraph([(0,1),(1,2)]) >>> G.add_node(3) >>> nx.isolates(G) [3] """ return [n for (n,d) in G.degree_iter() if d==0] networkx-1.8.1/networkx/algorithms/tests/0000775000175000017500000000000012177457361020473 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/tests/test_swap.py0000664000175000017500000000235412177456333023060 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from networkx import * def test_double_edge_swap(): graph = barabasi_albert_graph(200,1) degrees = sorted(graph.degree().values()) G = double_edge_swap(graph, 40) assert_equal(degrees, sorted(graph.degree().values())) def test_connected_double_edge_swap(): graph = barabasi_albert_graph(200,1) degrees = sorted(graph.degree().values()) G = connected_double_edge_swap(graph, 40) assert_true(is_connected(graph)) assert_equal(degrees, sorted(graph.degree().values())) @raises(NetworkXError) def test_double_edge_swap_small(): G = nx.double_edge_swap(nx.path_graph(3)) @raises(NetworkXError) def test_double_edge_swap_tries(): G = nx.double_edge_swap(nx.path_graph(10),nswap=1,max_tries=0) @raises(NetworkXError) def test_connected_double_edge_swap_small(): G = nx.connected_double_edge_swap(nx.path_graph(3)) @raises(NetworkXError) def test_connected_double_edge_swap_not_connected(): G = nx.path_graph(3) G.add_path([10,11,12]) G = nx.connected_double_edge_swap(G) def test_degree_seq_c4(): G = cycle_graph(4) degrees = sorted(G.degree().values()) G = double_edge_swap(G,1,100) assert_equal(degrees, sorted(G.degree().values())) networkx-1.8.1/networkx/algorithms/tests/test_mis.py0000664000175000017500000000667012177456333022703 0ustar aricaric00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- # $Id: test_maximal_independent_set.py 577 2011-03-01 06:07:53Z lleeoo $ """ Tests for maximal (not maximum) independent sets. """ # Copyright (C) 2004-2010 by # Leo Lopes # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
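# Orientation for these tests: in a complete graph any two nodes are adjacent,
# so every maximal independent set is a single node; for example
# nx.maximal_independent_set(nx.complete_graph(3), [1]) == [1].  test_K5 and
# test_K55 below check exactly this property.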
__author__ = """Leo Lopes (leo.lopes@monash.edu)""" from nose.tools import * import networkx as nx import random class TestMaximalIndependantSet(object): def setup(self): self.florentine = nx.Graph() self.florentine.add_edge('Acciaiuoli','Medici') self.florentine.add_edge('Castellani','Peruzzi') self.florentine.add_edge('Castellani','Strozzi') self.florentine.add_edge('Castellani','Barbadori') self.florentine.add_edge('Medici','Barbadori') self.florentine.add_edge('Medici','Ridolfi') self.florentine.add_edge('Medici','Tornabuoni') self.florentine.add_edge('Medici','Albizzi') self.florentine.add_edge('Medici','Salviati') self.florentine.add_edge('Salviati','Pazzi') self.florentine.add_edge('Peruzzi','Strozzi') self.florentine.add_edge('Peruzzi','Bischeri') self.florentine.add_edge('Strozzi','Ridolfi') self.florentine.add_edge('Strozzi','Bischeri') self.florentine.add_edge('Ridolfi','Tornabuoni') self.florentine.add_edge('Tornabuoni','Guadagni') self.florentine.add_edge('Albizzi','Ginori') self.florentine.add_edge('Albizzi','Guadagni') self.florentine.add_edge('Bischeri','Guadagni') self.florentine.add_edge('Guadagni','Lamberteschi') def test_K5(self): """Maximal independent set: K5""" G = nx.complete_graph(5) for node in G: assert_equal(nx.maximal_independent_set(G, [node]), [node]) def test_K55(self): """Maximal independent set: K55""" G = nx.complete_graph(55) for node in G: assert_equal(nx.maximal_independent_set(G, [node]), [node]) def test_exception(self): """Bad input should raise exception.""" G = self.florentine assert_raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Smith"]) assert_raises(nx.NetworkXUnfeasible, nx.maximal_independent_set, G, ["Salviati", "Pazzi"]) def test_florentine_family(self): G = self.florentine indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"]) assert_equal(sorted(indep), sorted(["Medici", "Bischeri", "Castellani", "Pazzi", "Ginori", "Lamberteschi"])) def test_bipartite(self): G = nx.complete_bipartite_graph(12, 34) indep = nx.maximal_independent_set(G, [4, 5, 9, 10]) assert_equal(sorted(indep), list(range(12))) def test_random_graphs(self): """Generate 50 random graphs of different types and sizes and make sure that all sets are independent and maximal.""" for i in range(0, 50, 10): G = nx.random_graphs.erdos_renyi_graph(i*10+1, random.random()) IS = nx.maximal_independent_set(G) assert_false(G.subgraph(IS).edges()) neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS)) for v in set(G.nodes()).difference(IS): assert_true(v in neighbors_of_MIS) networkx-1.8.1/networkx/algorithms/tests/test_clique.py0000664000175000017500000001145512177456333023372 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx import convert_node_labels_to_integers as cnlti class TestCliques: def setUp(self): z=[3,4,3,4,2,4,2,1,1,1,1] self.G=cnlti(nx.generators.havel_hakimi_graph(z),first_label=1) self.cl=list(nx.find_cliques(self.G)) H=nx.complete_graph(6) H=nx.relabel_nodes(H,dict( [(i,i+1) for i in range(6)])) H.remove_edges_from([(2,6),(2,5),(2,4),(1,3),(5,3)]) self.H=H def test_find_cliques1(self): cl=list(nx.find_cliques(self.G)) rcl=nx.find_cliques_recursive(self.G) assert_equal(sorted(map(sorted,cl)), sorted(map(sorted,rcl))) assert_equal(cl, [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]) def test_selfloops(self): self.G.add_edge(1,1) cl=list(nx.find_cliques(self.G)) rcl=nx.find_cliques_recursive(self.G) assert_equal(sorted(map(sorted,cl)), sorted(map(sorted,rcl))) 
assert_equal(cl, [[2, 6, 1, 3], [2, 6, 4], [5, 4, 7], [8, 9], [10, 11]]) def test_find_cliques2(self): hcl=list(nx.find_cliques(self.H)) assert_equal(sorted(map(sorted,hcl)), [[1, 2], [1, 4, 5, 6], [2, 3], [3, 4, 6]]) def test_clique_number(self): G=self.G assert_equal(nx.graph_clique_number(G),4) assert_equal(nx.graph_clique_number(G,cliques=self.cl),4) def test_number_of_cliques(self): G=self.G assert_equal(nx.graph_number_of_cliques(G),5) assert_equal(nx.graph_number_of_cliques(G,cliques=self.cl),5) assert_equal(nx.number_of_cliques(G,1),1) assert_equal(list(nx.number_of_cliques(G,[1]).values()),[1]) assert_equal(list(nx.number_of_cliques(G,[1,2]).values()),[1, 2]) assert_equal(nx.number_of_cliques(G,[1,2]),{1: 1, 2: 2}) assert_equal(nx.number_of_cliques(G,2),2) assert_equal(nx.number_of_cliques(G), {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) assert_equal(nx.number_of_cliques(G,nodes=G.nodes()), {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) assert_equal(nx.number_of_cliques(G,nodes=[2,3,4]), {2: 2, 3: 1, 4: 2}) assert_equal(nx.number_of_cliques(G,cliques=self.cl), {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) assert_equal(nx.number_of_cliques(G,G.nodes(),cliques=self.cl), {1: 1, 2: 2, 3: 1, 4: 2, 5: 1, 6: 2, 7: 1, 8: 1, 9: 1, 10: 1, 11: 1}) def test_node_clique_number(self): G=self.G assert_equal(nx.node_clique_number(G,1),4) assert_equal(list(nx.node_clique_number(G,[1]).values()),[4]) assert_equal(list(nx.node_clique_number(G,[1,2]).values()),[4, 4]) assert_equal(nx.node_clique_number(G,[1,2]),{1: 4, 2: 4}) assert_equal(nx.node_clique_number(G,1),4) assert_equal(nx.node_clique_number(G), {1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4, 7: 3, 8: 2, 9: 2, 10: 2, 11: 2}) assert_equal(nx.node_clique_number(G,cliques=self.cl), {1: 4, 2: 4, 3: 4, 4: 3, 5: 3, 6: 4, 7: 3, 8: 2, 9: 2, 10: 2, 11: 2}) def test_cliques_containing_node(self): G=self.G assert_equal(nx.cliques_containing_node(G,1), [[2, 6, 1, 3]]) assert_equal(list(nx.cliques_containing_node(G,[1]).values()), [[[2, 6, 1, 3]]]) assert_equal(list(nx.cliques_containing_node(G,[1,2]).values()), [[[2, 6, 1, 3]], [[2, 6, 1, 3], [2, 6, 4]]]) assert_equal(nx.cliques_containing_node(G,[1,2]), {1: [[2, 6, 1, 3]], 2: [[2, 6, 1, 3], [2, 6, 4]]}) assert_equal(nx.cliques_containing_node(G,1), [[2, 6, 1, 3]]) assert_equal(nx.cliques_containing_node(G,2), [[2, 6, 1, 3], [2, 6, 4]]) assert_equal(nx.cliques_containing_node(G,2,cliques=self.cl), [[2, 6, 1, 3], [2, 6, 4]]) assert_equal(len(nx.cliques_containing_node(G)),11) def test_make_clique_bipartite(self): G=self.G B=nx.make_clique_bipartite(G) assert_equal(sorted(B.nodes()), [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) H=nx.project_down(B) assert_equal(H.adj,G.adj) H1=nx.project_up(B) assert_equal(H1.nodes(),[1, 2, 3, 4, 5]) H2=nx.make_max_clique_graph(G) assert_equal(H1.adj,H2.adj) @raises(nx.NetworkXNotImplemented) def test_directed(self): cliques=nx.find_cliques(nx.DiGraph()) networkx-1.8.1/networkx/algorithms/tests/test_distance_measures.py0000664000175000017500000000413112177456333025577 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx class TestDistance: def setUp(self): G=networkx.Graph() from networkx import convert_node_labels_to_integers as cnlti G=cnlti(networkx.grid_2d_graph(4,4),first_label=1,ordering="sorted") self.G=G def test_eccentricity(self): assert_equal(networkx.eccentricity(self.G,1),6) e=networkx.eccentricity(self.G) assert_equal(e[1],6) 
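# node 1 is a corner of the 4x4 grid, so its eccentricity is 3+3=6; the same
# value must come back when precomputed shortest path lengths are supplied
# through the sp argument below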
sp=networkx.shortest_path_length(self.G) e=networkx.eccentricity(self.G,sp=sp) assert_equal(e[1],6) e=networkx.eccentricity(self.G,v=1) assert_equal(e,6) e=networkx.eccentricity(self.G,v=[1,1]) #This behavior changed in version 1.8 (ticket #739) assert_equal(e[1],6) e=networkx.eccentricity(self.G,v=[1,2]) assert_equal(e[1],6) # test against graph with one node G=networkx.path_graph(1) e=networkx.eccentricity(G) assert_equal(e[0],0) e=networkx.eccentricity(G,v=0) assert_equal(e,0) assert_raises(networkx.NetworkXError, networkx.eccentricity, G, 1) # test against empty graph G=networkx.empty_graph() e=networkx.eccentricity(G) assert_equal(e,{}) def test_diameter(self): assert_equal(networkx.diameter(self.G),6) def test_radius(self): assert_equal(networkx.radius(self.G),4) def test_periphery(self): assert_equal(set(networkx.periphery(self.G)),set([1, 4, 13, 16])) def test_center(self): assert_equal(set(networkx.center(self.G)),set([6, 7, 10, 11])) def test_radius_exception(self): G=networkx.Graph() G.add_edge(1,2) G.add_edge(3,4) assert_raises(networkx.NetworkXError, networkx.diameter, G) @raises(networkx.NetworkXError) def test_eccentricity_infinite(self): G=networkx.Graph([(1,2),(3,4)]) e = networkx.eccentricity(G) @raises(networkx.NetworkXError) def test_eccentricity_invalid(self): G=networkx.Graph([(1,2),(3,4)]) e = networkx.eccentricity(G,sp=1) networkx-1.8.1/networkx/algorithms/tests/test_simple_paths.py0000664000175000017500000000450412177456333024575 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx def test_all_simple_paths(): G = nx.path_graph(4) paths = nx.all_simple_paths(G,0,3) assert_equal(list(list(p) for p in paths),[[0,1,2,3]]) def test_all_simple_paths_cutoff(): G = nx.complete_graph(4) paths = nx.all_simple_paths(G,0,1,cutoff=1) assert_equal(list(list(p) for p in paths),[[0,1]]) paths = nx.all_simple_paths(G,0,1,cutoff=2) assert_equal(list(list(p) for p in paths),[[0,1],[0,2,1],[0,3,1]]) def test_all_simple_paths_multigraph(): G = nx.MultiGraph([(1,2),(1,2)]) paths = nx.all_simple_paths(G,1,2) assert_equal(list(list(p) for p in paths),[[1,2],[1,2]]) def test_all_simple_paths_multigraph_with_cutoff(): G = nx.MultiGraph([(1,2),(1,2),(1,10),(10,2)]) paths = nx.all_simple_paths(G,1,2, cutoff=1) assert_equal(list(list(p) for p in paths),[[1,2],[1,2]]) def test_all_simple_paths_directed(): G = nx.DiGraph() G.add_path([1,2,3]) G.add_path([3,2,1]) paths = nx.all_simple_paths(G,1,3) assert_equal(list(list(p) for p in paths),[[1,2,3]]) def test_all_simple_paths_empty(): G = nx.path_graph(4) paths = nx.all_simple_paths(G,0,3,cutoff=2) assert_equal(list(list(p) for p in paths),[]) def hamiltonian_path(G,source): source = next(G.nodes_iter()) neighbors = set(G[source])-set([source]) n = len(G) for target in neighbors: for path in nx.all_simple_paths(G,source,target): if len(path) == n: yield path def test_hamiltonian_path(): from itertools import permutations G=nx.complete_graph(4) paths = [list(p) for p in hamiltonian_path(G,0)] exact = [[0]+list(p) for p in permutations([1,2,3],3) ] assert_equal(sorted(paths),sorted(exact)) def test_cutoff_zero(): G = nx.complete_graph(4) paths = nx.all_simple_paths(G,0,3,cutoff=0) assert_equal(list(list(p) for p in paths),[]) paths = nx.all_simple_paths(nx.MultiGraph(G),0,3,cutoff=0) assert_equal(list(list(p) for p in paths),[]) @raises(nx.NetworkXError) def test_source_missing(): G = nx.Graph() G.add_path([1,2,3]) paths = list(nx.all_simple_paths(nx.MultiGraph(G),0,3)) @raises(nx.NetworkXError) def 
test_target_missing(): G = nx.Graph() G.add_path([1,2,3]) paths = list(nx.all_simple_paths(nx.MultiGraph(G),1,4)) networkx-1.8.1/networkx/algorithms/tests/test_cycles.py0000664000175000017500000000777312177456333023402 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx import networkx as nx class TestCycles: def setUp(self): G=networkx.Graph() G.add_cycle([0,1,2,3]) G.add_cycle([0,3,4,5]) G.add_cycle([0,1,6,7,8]) G.add_edge(8,9) self.G=G def is_cyclic_permutation(self,a,b): n=len(a) if len(b)!=n: return False l=a+a return any(l[i:i+n]==b for i in range(2*n-n+1)) def test_cycle_basis(self): G=self.G cy=networkx.cycle_basis(G,0) sort_cy= sorted( sorted(c) for c in cy ) assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5]]) cy=networkx.cycle_basis(G,1) sort_cy= sorted( sorted(c) for c in cy ) assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5]]) cy=networkx.cycle_basis(G,9) sort_cy= sorted( sorted(c) for c in cy ) assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5]]) # test disconnected graphs G.add_cycle(list("ABC")) cy=networkx.cycle_basis(G,9) sort_cy= sorted(sorted(c) for c in cy[:-1]) + [sorted(cy[-1])] assert_equal(sort_cy, [[0,1,2,3],[0,1,6,7,8],[0,3,4,5],['A','B','C']]) @raises(nx.NetworkXNotImplemented) def test_cycle_basis(self): G=nx.DiGraph() cy=networkx.cycle_basis(G,0) @raises(nx.NetworkXNotImplemented) def test_cycle_basis(self): G=nx.MultiGraph() cy=networkx.cycle_basis(G,0) def test_simple_cycles(self): G = nx.DiGraph([(0, 0), (0, 1), (0, 2), (1, 2), (2, 0), (2, 1), (2, 2)]) cc=sorted(nx.simple_cycles(G)) ca=[[0], [0, 1, 2], [0, 2], [1, 2], [2]] for c in cc: assert_true(any(self.is_cyclic_permutation(c,rc) for rc in ca)) @raises(nx.NetworkXNotImplemented) def test_simple_cycles_graph(self): G = nx.Graph() c = sorted(nx.simple_cycles(G)) def test_unsortable(self): # TODO What does this test do? 
das 6/2013 G=nx.DiGraph() G.add_cycle(['a',1]) c=list(nx.simple_cycles(G)) def test_simple_cycles_small(self): G = nx.DiGraph() G.add_cycle([1,2,3]) c=sorted(nx.simple_cycles(G)) assert_equal(len(c),1) assert_true(self.is_cyclic_permutation(c[0],[1,2,3])) G.add_cycle([10,20,30]) cc=sorted(nx.simple_cycles(G)) ca=[[1,2,3],[10,20,30]] for c in cc: assert_true(any(self.is_cyclic_permutation(c,rc) for rc in ca)) def test_simple_cycles_empty(self): G = nx.DiGraph() assert_equal(list(nx.simple_cycles(G)),[]) def test_complete_directed_graph(self): # see table 2 in Johnson's paper ncircuits=[1,5,20,84,409,2365,16064] for n,c in zip(range(2,9),ncircuits): G=nx.DiGraph(nx.complete_graph(n)) assert_equal(len(list(nx.simple_cycles(G))),c) def worst_case_graph(self,k): # see figure 1 in Johnson's paper # this graph has excactly 3k simple cycles G=nx.DiGraph() for n in range(2,k+2): G.add_edge(1,n) G.add_edge(n,k+2) G.add_edge(2*k+1,1) for n in range(k+2,2*k+2): G.add_edge(n,2*k+2) G.add_edge(n,n+1) G.add_edge(2*k+3,k+2) for n in range(2*k+3,3*k+3): G.add_edge(2*k+2,n) G.add_edge(n,3*k+3) G.add_edge(3*k+3,2*k+2) return G def test_worst_case_graph(self): # see figure 1 in Johnson's paper for k in range(3,10): G=self.worst_case_graph(k) l=len(list(nx.simple_cycles(G))) assert_equal(l,3*k) def test_recursive_simple_and_not(self): for k in range(2,10): G=self.worst_case_graph(k) cc=sorted(nx.simple_cycles(G)) rcc=sorted(nx.recursive_simple_cycles(G)) assert_equal(len(cc),len(rcc)) for c in cc: assert_true(any(self.is_cyclic_permutation(c,rc) for rc in rcc)) networkx-1.8.1/networkx/algorithms/tests/test_mst.py0000664000175000017500000001140712177456333022710 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestMST: def setUp(self): # example from Wikipedia: http://en.wikipedia.org/wiki/Kruskal's_algorithm G=nx.Graph() edgelist = [(0,3,[('weight',5)]), (0,1,[('weight',7)]), (1,3,[('weight',9)]), (1,2,[('weight',8)]), (1,4,[('weight',7)]), (3,4,[('weight',15)]), (3,5,[('weight',6)]), (2,4,[('weight',5)]), (4,5,[('weight',8)]), (4,6,[('weight',9)]), (5,6,[('weight',11)])] G.add_edges_from(edgelist) self.G=G tree_edgelist = [(0,1,{'weight':7}), (0,3,{'weight':5}), (3,5,{'weight':6}), (1,4,{'weight':7}), (4,2,{'weight':5}), (4,6,{'weight':9})] self.tree_edgelist=sorted((sorted((u, v))[0], sorted((u, v))[1], d) for u,v,d in tree_edgelist) def test_mst(self): T=nx.minimum_spanning_tree(self.G) assert_equal(T.edges(data=True),self.tree_edgelist) def test_mst_edges(self): edgelist=sorted(nx.minimum_spanning_edges(self.G)) assert_equal(edgelist,self.tree_edgelist) def test_mst_disconnected(self): G=nx.Graph() G.add_path([1,2]) G.add_path([10,20]) T=nx.minimum_spanning_tree(G) assert_equal(sorted(T.edges()),[(1, 2), (20, 10)]) assert_equal(sorted(T.nodes()),[1, 2, 10, 20]) def test_mst_isolate(self): G=nx.Graph() G.add_nodes_from([1,2]) T=nx.minimum_spanning_tree(G) assert_equal(sorted(T.nodes()),[1, 2]) assert_equal(sorted(T.edges()),[]) def test_mst_attributes(self): G=nx.Graph() G.add_edge(1,2,weight=1,color='red',distance=7) G.add_edge(2,3,weight=1,color='green',distance=2) G.add_edge(1,3,weight=10,color='blue',distance=1) G.add_node(13,color='purple') G.graph['foo']='bar' T=nx.minimum_spanning_tree(G) assert_equal(T.graph,G.graph) assert_equal(T.node[13],G.node[13]) assert_equal(T.edge[1][2],G.edge[1][2]) def test_mst_edges_specify_weight(self): G=nx.Graph() G.add_edge(1,2,weight=1,color='red',distance=7) G.add_edge(1,3,weight=30,color='blue',distance=1) 
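# edge (1,3) is the heaviest by 'weight' but joint-cheapest by 'distance';
# the assertions below check that minimum_spanning_tree honours whichever
# attribute key is selected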
G.add_edge(2,3,weight=1,color='green',distance=1) G.add_node(13,color='purple') G.graph['foo']='bar' T=nx.minimum_spanning_tree(G) assert_equal(sorted(T.nodes()),[1,2,3,13]) assert_equal(sorted(T.edges()),[(1,2),(2,3)]) T=nx.minimum_spanning_tree(G,weight='distance') assert_equal(sorted(T.edges()),[(1,3),(2,3)]) assert_equal(sorted(T.nodes()),[1,2,3,13]) def test_prim_mst(self): T=nx.prim_mst(self.G) assert_equal(T.edges(data=True),self.tree_edgelist) def test_prim_mst_edges(self): edgelist=sorted(nx.prim_mst_edges(self.G)) edgelist=sorted((sorted((u, v))[0], sorted((u, v))[1], d) for u,v,d in edgelist) assert_equal(edgelist,self.tree_edgelist) def test_prim_mst_disconnected(self): G=nx.Graph() G.add_path([1,2]) G.add_path([10,20]) T=nx.prim_mst(G) assert_equal(sorted(T.edges()),[(1, 2), (20, 10)]) assert_equal(sorted(T.nodes()),[1, 2, 10, 20]) def test_prim_mst_isolate(self): G=nx.Graph() G.add_nodes_from([1,2]) T=nx.prim_mst(G) assert_equal(sorted(T.nodes()),[1, 2]) assert_equal(sorted(T.edges()),[]) def test_prim_mst_attributes(self): G=nx.Graph() G.add_edge(1,2,weight=1,color='red',distance=7) G.add_edge(2,3,weight=1,color='green',distance=2) G.add_edge(1,3,weight=10,color='blue',distance=1) G.add_node(13,color='purple') G.graph['foo']='bar' T=nx.prim_mst(G) assert_equal(T.graph,G.graph) assert_equal(T.node[13],G.node[13]) assert_equal(T.edge[1][2],G.edge[1][2]) def test_prim_mst_edges_specify_weight(self): G=nx.Graph() G.add_edge(1,2,weight=1,color='red',distance=7) G.add_edge(1,3,weight=30,color='blue',distance=1) G.add_edge(2,3,weight=1,color='green',distance=1) G.add_node(13,color='purple') G.graph['foo']='bar' T=nx.prim_mst(G) assert_equal(sorted(T.nodes()),[1,2,3,13]) assert_equal(sorted(T.edges()),[(1,2),(2,3)]) T=nx.prim_mst(G,weight='distance') assert_equal(sorted(T.edges()),[(1,3),(2,3)]) assert_equal(sorted(T.nodes()),[1,2,3,13]) networkx-1.8.1/networkx/algorithms/tests/test_core.py0000664000175000017500000001030312177456333023027 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestCore: def setUp(self): # G is the example graph in Figure 1 from Batagelj and # Zaversnik's paper titled An O(m) Algorithm for Cores # Decomposition of Networks, 2003, # http://arXiv.org/abs/cs/0310049. With nodes labeled as # shown, the 3-core is given by nodes 1-8, the 2-core by nodes # 9-16, the 1-core by nodes 17-20 and node 21 is in the # 0-core. t1=nx.convert_node_labels_to_integers(nx.tetrahedral_graph(),1) t2=nx.convert_node_labels_to_integers(t1,5) G=nx.union(t1,t2) G.add_edges_from( [(3,7), (2,11), (11,5), (11,12), (5,12), (12,19), (12,18), (3,9), (7,9), (7,10), (9,10), (9,20), (17,13), (13,14), (14,15), (15,16), (16,13)]) G.add_node(21) self.G=G # Create the graph H resulting from the degree sequence # [0,1,2,2,2,2,3] when using the Havel-Hakimi algorithm. 
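# After relabeling with `mapping`, node 0 is the lone 0-core node, nodes 1
# and 3 form the 1-shell, and nodes 2, 4, 5, 6 form the 2-core; the k_core,
# k_shell, k_crust and k_corona tests below rely on this layout.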
degseq=[0,1,2,2,2,2,3] H = nx.havel_hakimi_graph(degseq) mapping = {6:0, 0:1, 4:3, 5:6, 3:4, 1:2, 2:5 } self.H = nx.relabel_nodes(H, mapping) def test_trivial(self): """Empty graph""" G = nx.Graph() assert_equal(nx.find_cores(G),{}) def test_find_cores(self): cores=nx.find_cores(self.G) nodes_by_core=[] for val in [0,1,2,3]: nodes_by_core.append( sorted([k for k in cores if cores[k]==val])) assert_equal(nodes_by_core[0],[21]) assert_equal(nodes_by_core[1],[17, 18, 19, 20]) assert_equal(nodes_by_core[2],[9, 10, 11, 12, 13, 14, 15, 16]) assert_equal(nodes_by_core[3], [1, 2, 3, 4, 5, 6, 7, 8]) def test_core_number(self): # smoke test real name cores=nx.core_number(self.G) def test_find_cores2(self): cores=nx.find_cores(self.H) nodes_by_core=[] for val in [0,1,2]: nodes_by_core.append( sorted([k for k in cores if cores[k]==val])) assert_equal(nodes_by_core[0],[0]) assert_equal(nodes_by_core[1],[1, 3]) assert_equal(nodes_by_core[2],[2, 4, 5, 6]) def test_main_core(self): main_core_subgraph=nx.k_core(self.H) assert_equal(sorted(main_core_subgraph.nodes()),[2,4,5,6]) def test_k_core(self): # k=0 k_core_subgraph=nx.k_core(self.H,k=0) assert_equal(sorted(k_core_subgraph.nodes()),sorted(self.H.nodes())) # k=1 k_core_subgraph=nx.k_core(self.H,k=1) assert_equal(sorted(k_core_subgraph.nodes()),[1,2,3,4,5,6]) # k=2 k_core_subgraph=nx.k_core(self.H,k=2) assert_equal(sorted(k_core_subgraph.nodes()),[2,4,5,6]) def test_main_crust(self): main_crust_subgraph=nx.k_crust(self.H) assert_equal(sorted(main_crust_subgraph.nodes()),[0,1,3]) def test_k_crust(self): # k=0 k_crust_subgraph=nx.k_crust(self.H,k=2) assert_equal(sorted(k_crust_subgraph.nodes()),sorted(self.H.nodes())) # k=1 k_crust_subgraph=nx.k_crust(self.H,k=1) assert_equal(sorted(k_crust_subgraph.nodes()),[0,1,3]) # k=2 k_crust_subgraph=nx.k_crust(self.H,k=0) assert_equal(sorted(k_crust_subgraph.nodes()),[0]) def test_main_shell(self): main_shell_subgraph=nx.k_shell(self.H) assert_equal(sorted(main_shell_subgraph.nodes()),[2,4,5,6]) def test_k_shell(self): # k=0 k_shell_subgraph=nx.k_shell(self.H,k=2) assert_equal(sorted(k_shell_subgraph.nodes()),[2,4,5,6]) # k=1 k_shell_subgraph=nx.k_shell(self.H,k=1) assert_equal(sorted(k_shell_subgraph.nodes()),[1,3]) # k=2 k_shell_subgraph=nx.k_shell(self.H,k=0) assert_equal(sorted(k_shell_subgraph.nodes()),[0]) def test_k_corona(self): # k=0 k_corona_subgraph=nx.k_corona(self.H,k=2) assert_equal(sorted(k_corona_subgraph.nodes()),[2,4,5,6]) # k=1 k_corona_subgraph=nx.k_corona(self.H,k=1) assert_equal(sorted(k_corona_subgraph.nodes()),[1]) # k=2 k_corona_subgraph=nx.k_corona(self.H,k=0) assert_equal(sorted(k_corona_subgraph.nodes()),[0]) networkx-1.8.1/networkx/algorithms/tests/test_euler.py0000664000175000017500000000502212177456333023215 0ustar aricaric00000000000000#!/usr/bin/env python # run with nose: nosetests -v test_euler.py from nose.tools import * import networkx as nx from networkx import is_eulerian,eulerian_circuit class TestEuler: def test_is_eulerian(self): assert_true(is_eulerian(nx.complete_graph(5))) assert_true(is_eulerian(nx.complete_graph(7))) assert_true(is_eulerian(nx.hypercube_graph(4))) assert_true(is_eulerian(nx.hypercube_graph(6))) assert_false(is_eulerian(nx.complete_graph(4))) assert_false(is_eulerian(nx.complete_graph(6))) assert_false(is_eulerian(nx.hypercube_graph(3))) assert_false(is_eulerian(nx.hypercube_graph(5))) assert_false(is_eulerian(nx.petersen_graph())) assert_false(is_eulerian(nx.path_graph(4))) def test_is_eulerian2(self): # not connected G = nx.Graph() 
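# three isolated nodes: there are no edges and the graph is not connected,
# so it must not be reported as Eulerian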
G.add_nodes_from([1,2,3]) assert_false(is_eulerian(G)) # not strongly connected G = nx.DiGraph() G.add_nodes_from([1,2,3]) assert_false(is_eulerian(G)) G = nx.MultiDiGraph() G.add_edge(1,2) G.add_edge(2,3) G.add_edge(2,3) G.add_edge(3,1) assert_false(is_eulerian(G)) def test_eulerian_circuit_cycle(self): G=nx.cycle_graph(4) edges=list(eulerian_circuit(G,source=0)) nodes=[u for u,v in edges] assert_equal(nodes,[0,1,2,3]) assert_equal(edges,[(0,1),(1,2),(2,3),(3,0)]) edges=list(eulerian_circuit(G,source=1)) nodes=[u for u,v in edges] assert_equal(nodes,[1,0,3,2]) assert_equal(edges,[(1,0),(0,3),(3,2),(2,1)]) def test_eulerian_circuit_digraph(self): G=nx.DiGraph() G.add_cycle([0,1,2,3]) edges=list(eulerian_circuit(G,source=0)) nodes=[u for u,v in edges] assert_equal(nodes,[0,1,2,3]) assert_equal(edges,[(0,1),(1,2),(2,3),(3,0)]) edges=list(eulerian_circuit(G,source=1)) nodes=[u for u,v in edges] assert_equal(nodes,[1,2,3,0]) assert_equal(edges,[(1,2),(2,3),(3,0),(0,1)]) def test_eulerian_circuit_multigraph(self): G=nx.MultiGraph() G.add_cycle([0,1,2,3]) G.add_edge(1,2) G.add_edge(1,2) edges=list(eulerian_circuit(G,source=0)) nodes=[u for u,v in edges] assert_equal(nodes,[0,1,2,1,2,3]) assert_equal(edges,[(0,1),(1,2),(2,1),(1,2),(2,3),(3,0)]) @raises(nx.NetworkXError) def test_not_eulerian(self): f=list(eulerian_circuit(nx.complete_graph(4))) networkx-1.8.1/networkx/algorithms/tests/test_block.py0000664000175000017500000000716012177456333023200 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx class TestBlock: def test_path(self): G=networkx.path_graph(6) partition=[[0,1],[2,3],[4,5]] M=networkx.blockmodel(G,partition) assert_equal(sorted(M.nodes()),[0,1,2]) assert_equal(sorted(M.edges()),[(0,1),(1,2)]) for n in M.nodes(): assert_equal(M.node[n]['nedges'],1) assert_equal(M.node[n]['nnodes'],2) assert_equal(M.node[n]['density'],1.0) def test_multigraph_path(self): G=networkx.MultiGraph(networkx.path_graph(6)) partition=[[0,1],[2,3],[4,5]] M=networkx.blockmodel(G,partition,multigraph=True) assert_equal(sorted(M.nodes()),[0,1,2]) assert_equal(sorted(M.edges()),[(0,1),(1,2)]) for n in M.nodes(): assert_equal(M.node[n]['nedges'],1) assert_equal(M.node[n]['nnodes'],2) assert_equal(M.node[n]['density'],1.0) def test_directed_path(self): G = networkx.DiGraph() G.add_path(list(range(6))) partition=[[0,1],[2,3],[4,5]] M=networkx.blockmodel(G,partition) assert_equal(sorted(M.nodes()),[0,1,2]) assert_equal(sorted(M.edges()),[(0,1),(1,2)]) for n in M.nodes(): assert_equal(M.node[n]['nedges'],1) assert_equal(M.node[n]['nnodes'],2) assert_equal(M.node[n]['density'],0.5) def test_directed_multigraph_path(self): G = networkx.MultiDiGraph() G.add_path(list(range(6))) partition=[[0,1],[2,3],[4,5]] M=networkx.blockmodel(G,partition,multigraph=True) assert_equal(sorted(M.nodes()),[0,1,2]) assert_equal(sorted(M.edges()),[(0,1),(1,2)]) for n in M.nodes(): assert_equal(M.node[n]['nedges'],1) assert_equal(M.node[n]['nnodes'],2) assert_equal(M.node[n]['density'],0.5) @raises(networkx.NetworkXException) def test_overlapping(self): G=networkx.path_graph(6) partition=[[0,1,2],[2,3],[4,5]] M=networkx.blockmodel(G,partition) def test_weighted_path(self): G=networkx.path_graph(6) G[0][1]['weight']=1 G[1][2]['weight']=2 G[2][3]['weight']=3 G[3][4]['weight']=4 G[4][5]['weight']=5 partition=[[0,1],[2,3],[4,5]] M=networkx.blockmodel(G,partition) assert_equal(sorted(M.nodes()),[0,1,2]) assert_equal(sorted(M.edges()),[(0,1),(1,2)]) assert_equal(M[0][1]['weight'],2) 
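# block {0,1} connects to block {2,3} only through edge (1,2) of weight 2,
# and block {2,3} connects to block {4,5} only through edge (3,4) of weight 4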
assert_equal(M[1][2]['weight'],4) for n in M.nodes(): assert_equal(M.node[n]['nedges'],1) assert_equal(M.node[n]['nnodes'],2) assert_equal(M.node[n]['density'],1.0) def test_barbell(self): G=networkx.barbell_graph(3,0) partition=[[0,1,2],[3,4,5]] M=networkx.blockmodel(G,partition) assert_equal(sorted(M.nodes()),[0,1]) assert_equal(sorted(M.edges()),[(0,1)]) for n in M.nodes(): assert_equal(M.node[n]['nedges'],3) assert_equal(M.node[n]['nnodes'],3) assert_equal(M.node[n]['density'],1.0) def test_barbell_plus(self): G=networkx.barbell_graph(3,0) G.add_edge(0,5) # add extra edge between bells partition=[[0,1,2],[3,4,5]] M=networkx.blockmodel(G,partition) assert_equal(sorted(M.nodes()),[0,1]) assert_equal(sorted(M.edges()),[(0,1)]) assert_equal(M[0][1]['weight'],2) for n in M.nodes(): assert_equal(M.node[n]['nedges'],3) assert_equal(M.node[n]['nnodes'],3) assert_equal(M.node[n]['density'],1.0) networkx-1.8.1/networkx/algorithms/tests/test_cluster.py0000664000175000017500000001623112177456333023566 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestTriangles: def test_empty(self): G = nx.Graph() assert_equal(list(nx.triangles(G).values()),[]) def test_path(self): G = nx.path_graph(10) assert_equal(list(nx.triangles(G).values()), [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) assert_equal(nx.triangles(G), {0: 0, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0, 9: 0}) def test_cubical(self): G = nx.cubical_graph() assert_equal(list(nx.triangles(G).values()), [0, 0, 0, 0, 0, 0, 0, 0]) assert_equal(nx.triangles(G,1),0) assert_equal(list(nx.triangles(G,[1,2]).values()),[0, 0]) assert_equal(nx.triangles(G,1),0) assert_equal(nx.triangles(G,[1,2]),{1: 0, 2: 0}) def test_k5(self): G = nx.complete_graph(5) assert_equal(list(nx.triangles(G).values()),[6, 6, 6, 6, 6]) assert_equal(sum(nx.triangles(G).values())/3.0,10) assert_equal(nx.triangles(G,1),6) G.remove_edge(1,2) assert_equal(list(nx.triangles(G).values()),[5, 3, 3, 5, 5]) assert_equal(nx.triangles(G,1),3) class TestWeightedClustering: def test_clustering(self): G = nx.Graph() assert_equal(list(nx.clustering(G,weight='weight').values()),[]) assert_equal(nx.clustering(G),{}) def test_path(self): G = nx.path_graph(10) assert_equal(list(nx.clustering(G,weight='weight').values()), [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) assert_equal(nx.clustering(G,weight='weight'), {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) def test_cubical(self): G = nx.cubical_graph() assert_equal(list(nx.clustering(G,weight='weight').values()), [0, 0, 0, 0, 0, 0, 0, 0]) assert_equal(nx.clustering(G,1),0) assert_equal(list(nx.clustering(G,[1,2],weight='weight').values()),[0, 0]) assert_equal(nx.clustering(G,1,weight='weight'),0) assert_equal(nx.clustering(G,[1,2],weight='weight'),{1: 0, 2: 0}) def test_k5(self): G = nx.complete_graph(5) assert_equal(list(nx.clustering(G,weight='weight').values()),[1, 1, 1, 1, 1]) assert_equal(nx.average_clustering(G,weight='weight'),1) G.remove_edge(1,2) assert_equal(list(nx.clustering(G,weight='weight').values()), [5./6., 1.0, 1.0, 5./6., 5./6.]) assert_equal(nx.clustering(G,[1,4],weight='weight'),{1: 1.0, 4: 0.83333333333333337}) def test_triangle_and_edge(self): G=nx.Graph() G.add_cycle([0,1,2]) G.add_edge(0,4,weight=2) assert_equal(nx.clustering(G)[0],1.0/3.0) assert_equal(nx.clustering(G,weight='weight')[0],1.0/6.0) class TestClustering: def test_clustering(self): G = nx.Graph() assert_equal(list(nx.clustering(G).values()),[]) 
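# an empty graph gives an empty clustering mapping as well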
assert_equal(nx.clustering(G),{}) def test_path(self): G = nx.path_graph(10) assert_equal(list(nx.clustering(G).values()), [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) assert_equal(nx.clustering(G), {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) def test_cubical(self): G = nx.cubical_graph() assert_equal(list(nx.clustering(G).values()), [0, 0, 0, 0, 0, 0, 0, 0]) assert_equal(nx.clustering(G,1),0) assert_equal(list(nx.clustering(G,[1,2]).values()),[0, 0]) assert_equal(nx.clustering(G,1),0) assert_equal(nx.clustering(G,[1,2]),{1: 0, 2: 0}) def test_k5(self): G = nx.complete_graph(5) assert_equal(list(nx.clustering(G).values()),[1, 1, 1, 1, 1]) assert_equal(nx.average_clustering(G),1) G.remove_edge(1,2) assert_equal(list(nx.clustering(G).values()), [5./6., 1.0, 1.0, 5./6., 5./6.]) assert_equal(nx.clustering(G,[1,4]),{1: 1.0, 4: 0.83333333333333337}) class TestTransitivity: def test_transitivity(self): G = nx.Graph() assert_equal(nx.transitivity(G),0.0) def test_path(self): G = nx.path_graph(10) assert_equal(nx.transitivity(G),0.0) def test_cubical(self): G = nx.cubical_graph() assert_equal(nx.transitivity(G),0.0) def test_k5(self): G = nx.complete_graph(5) assert_equal(nx.transitivity(G),1.0) G.remove_edge(1,2) assert_equal(nx.transitivity(G),0.875) # def test_clustering_transitivity(self): # # check that weighted average of clustering is transitivity # G = nx.complete_graph(5) # G.remove_edge(1,2) # t1=nx.transitivity(G) # (cluster_d2,weights)=nx.clustering(G,weights=True) # trans=[] # for v in G.nodes(): # trans.append(cluster_d2[v]*weights[v]) # t2=sum(trans) # assert_almost_equal(abs(t1-t2),0) class TestSquareClustering: def test_clustering(self): G = nx.Graph() assert_equal(list(nx.square_clustering(G).values()),[]) assert_equal(nx.square_clustering(G),{}) def test_path(self): G = nx.path_graph(10) assert_equal(list(nx.square_clustering(G).values()), [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) assert_equal(nx.square_clustering(G), {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0, 6: 0.0, 7: 0.0, 8: 0.0, 9: 0.0}) def test_cubical(self): G = nx.cubical_graph() assert_equal(list(nx.square_clustering(G).values()), [0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5]) assert_equal(list(nx.square_clustering(G,[1,2]).values()),[0.5, 0.5]) assert_equal(nx.square_clustering(G,[1])[1],0.5) assert_equal(nx.square_clustering(G,[1,2]),{1: 0.5, 2: 0.5}) def test_k5(self): G = nx.complete_graph(5) assert_equal(list(nx.square_clustering(G).values()),[1, 1, 1, 1, 1]) def test_bipartite_k5(self): G = nx.complete_bipartite_graph(5,5) assert_equal(list(nx.square_clustering(G).values()), [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]) def test_lind_square_clustering(self): """Test C4 for figure 1 Lind et al (2005)""" G = nx.Graph([(1,2),(1,3),(1,6),(1,7),(2,4),(2,5), (3,4),(3,5),(6,7),(7,8),(6,8),(7,9), (7,10),(6,11),(6,12),(2,13),(2,14),(3,15),(3,16)]) G1 = G.subgraph([1,2,3,4,5,13,14,15,16]) G2 = G.subgraph([1,6,7,8,9,10,11,12]) assert_equal(nx.square_clustering(G, [1])[1], 3/75.0) assert_equal(nx.square_clustering(G1, [1])[1], 2/6.0) assert_equal(nx.square_clustering(G2, [1])[1], 1/5.0) def test_average_clustering(): G=nx.cycle_graph(3) G.add_edge(2,3) assert_equal(nx.average_clustering(G),(1+1+1/3.0)/4.0) assert_equal(nx.average_clustering(G,count_zeros=True),(1+1+1/3.0)/4.0) assert_equal(nx.average_clustering(G,count_zeros=False),(1+1+1/3.0)/3.0) networkx-1.8.1/networkx/algorithms/tests/test_richclub.py0000664000175000017500000000173612177456333023704 0ustar 
aricaric00000000000000import networkx as nx from nose.tools import * def test_richclub(): G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)]) rc = nx.richclub.rich_club_coefficient(G,normalized=False) assert_equal(rc,{0: 12.0/30,1:8.0/12}) # test single value rc0 = nx.richclub.rich_club_coefficient(G,normalized=False)[0] assert_equal(rc0,12.0/30.0) def test_richclub_normalized(): G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)]) rcNorm = nx.richclub.rich_club_coefficient(G,Q=2) assert_equal(rcNorm,{0:1.0,1:1.0}) def test_richclub2(): T = nx.balanced_tree(2,10) rc = nx.richclub.rich_club_coefficient(T,normalized=False) assert_equal(rc,{0:4092/(2047*2046.0), 1:(2044.0/(1023*1022)), 2:(2040.0/(1022*1021))}) #def test_richclub2_normalized(): # T = nx.balanced_tree(2,10) # rcNorm = nx.richclub.rich_club_coefficient(T,Q=2) # assert_true(rcNorm[0] ==1.0 and rcNorm[1] < 0.9 and rcNorm[2] < 0.9) networkx-1.8.1/networkx/algorithms/tests/test_graphical.py0000664000175000017500000001025012177456333024032 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx def test_valid_degree_sequence1(): n = 100 p = .3 for i in range(10): G = nx.erdos_renyi_graph(n,p) deg = list(G.degree().values()) assert_true( nx.is_valid_degree_sequence(deg, method='eg') ) assert_true( nx.is_valid_degree_sequence(deg, method='hh') ) def test_valid_degree_sequence2(): n = 100 for i in range(10): G = nx.barabasi_albert_graph(n,1) deg = list(G.degree().values()) assert_true( nx.is_valid_degree_sequence(deg, method='eg') ) assert_true( nx.is_valid_degree_sequence(deg, method='hh') ) @raises(nx.NetworkXException) def test_string_input(): a = nx.is_valid_degree_sequence([],'foo') def test_negative_input(): assert_false(nx.is_valid_degree_sequence([-1],'hh')) assert_false(nx.is_valid_degree_sequence([-1],'eg')) assert_false(nx.is_valid_degree_sequence([72.5],'eg')) def test_atlas(): for graph in nx.graph_atlas_g(): deg = list(graph.degree().values()) assert_true( nx.is_valid_degree_sequence(deg, method='eg') ) assert_true( nx.is_valid_degree_sequence(deg, method='hh') ) def test_small_graph_true(): z=[5,3,3,3,3,2,2,2,1,1,1] assert_true(nx.is_valid_degree_sequence(z, method='hh')) assert_true(nx.is_valid_degree_sequence(z, method='eg')) z=[10,3,3,3,3,2,2,2,2,2,2] assert_true(nx.is_valid_degree_sequence(z, method='hh')) assert_true(nx.is_valid_degree_sequence(z, method='eg')) z=[1, 1, 1, 1, 1, 2, 2, 2, 3, 4] assert_true(nx.is_valid_degree_sequence(z, method='hh')) assert_true(nx.is_valid_degree_sequence(z, method='eg')) def test_small_graph_false(): z=[1000,3,3,3,3,2,2,2,1,1,1] assert_false(nx.is_valid_degree_sequence(z, method='hh')) assert_false(nx.is_valid_degree_sequence(z, method='eg')) z=[6,5,4,4,2,1,1,1] assert_false(nx.is_valid_degree_sequence(z, method='hh')) assert_false(nx.is_valid_degree_sequence(z, method='eg')) z=[1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] assert_false(nx.is_valid_degree_sequence(z, method='hh')) assert_false(nx.is_valid_degree_sequence(z, method='eg')) def test_directed_degree_sequence(): # Test a range of valid directed degree sequences n, r = 100, 10 p = 1.0 / r for i in range(r): G = nx.erdos_renyi_graph(n,p*(i+1),None,True) din = list(G.in_degree().values()) dout = list(G.out_degree().values()) assert_true(nx.is_digraphical(din, dout)) def test_small_directed_sequences(): dout=[5,3,3,3,3,2,2,2,1,1,1] din=[3,3,3,3,3,2,2,2,2,2,1] assert_true(nx.is_digraphical(din, dout)) # Test nongraphical directed sequence dout = [1000,3,3,3,3,2,2,2,1,1,1] 
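# non-digraphical pair: no simple digraph on this few nodes can give any
# node an out-degree of 1000, so is_digraphical must reject it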
din=[103,102,102,102,102,102,102,102,102,102] assert_false(nx.is_digraphical(din, dout)) # Test digraphical small sequence dout=[1, 1, 1, 1, 1, 2, 2, 2, 3, 4] din=[2, 2, 2, 2, 2, 2, 2, 2, 1, 1] assert_true(nx.is_digraphical(din, dout)) # Test nonmatching sum din=[2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1] assert_false(nx.is_digraphical(din, dout)) # Test for negative integer in sequence din=[2, 2, 2, -2, 2, 2, 2, 2, 1, 1, 4] assert_false(nx.is_digraphical(din, dout)) def test_multi_sequence(): # Test nongraphical multi sequence seq=[1000,3,3,3,3,2,2,2,1,1] assert_false(nx.is_multigraphical(seq)) # Test small graphical multi sequence seq=[6,5,4,4,2,1,1,1] assert_true(nx.is_multigraphical(seq)) # Test for negative integer in sequence seq=[6,5,4,-4,2,1,1,1] assert_false(nx.is_multigraphical(seq)) # Test for sequence with odd sum seq=[1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] assert_false(nx.is_multigraphical(seq)) def test_pseudo_sequence(): # Test small valid pseudo sequence seq=[1000,3,3,3,3,2,2,2,1,1] assert_true(nx.is_pseudographical(seq)) # Test for sequence with odd sum seq=[1000,3,3,3,3,2,2,2,1,1,1] assert_false(nx.is_pseudographical(seq)) # Test for negative integer in sequence seq=[1000,3,3,3,3,2,2,-2,1,1] assert_false(nx.is_pseudographical(seq)) networkx-1.8.1/networkx/algorithms/tests/test_vitality.py0000664000175000017500000000214412177456333023750 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestVitality: def test_closeness_vitality_unweighted(self): G=nx.cycle_graph(3) v=nx.closeness_vitality(G) assert_equal(v,{0:4.0, 1:4.0, 2:4.0}) assert_equal(v[0],4.0) def test_closeness_vitality_weighted(self): G=nx.Graph() G.add_cycle([0,1,2],weight=2) v=nx.closeness_vitality(G,weight='weight') assert_equal(v,{0:8.0, 1:8.0, 2:8.0}) def test_closeness_vitality_unweighted_digraph(self): G=nx.DiGraph() G.add_cycle([0,1,2]) v=nx.closeness_vitality(G) assert_equal(v,{0:8.0, 1:8.0, 2:8.0}) def test_closeness_vitality_weighted_digraph(self): G=nx.DiGraph() G.add_cycle([0,1,2],weight=2) v=nx.closeness_vitality(G,weight='weight') assert_equal(v,{0:16.0, 1:16.0, 2:16.0}) def test_closeness_vitality_weighted_multidigraph(self): G=nx.MultiDiGraph() G.add_cycle([0,1,2],weight=2) v=nx.closeness_vitality(G,weight='weight') assert_equal(v,{0:16.0, 1:16.0, 2:16.0}) networkx-1.8.1/networkx/algorithms/tests/test_dag.py0000664000175000017500000001314112177456333022635 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestDAG: def setUp(self): pass def test_topological_sort1(self): DG=nx.DiGraph() DG.add_edges_from([(1,2),(1,3),(2,3)]) assert_equal(nx.topological_sort(DG),[1, 2, 3]) assert_equal(nx.topological_sort_recursive(DG),[1, 2, 3]) DG.add_edge(3,2) assert_raises(nx.NetworkXUnfeasible, nx.topological_sort, DG) assert_raises(nx.NetworkXUnfeasible, nx.topological_sort_recursive, DG) DG.remove_edge(2,3) assert_equal(nx.topological_sort(DG),[1, 3, 2]) assert_equal(nx.topological_sort_recursive(DG),[1, 3, 2]) def test_is_directed_acyclic_graph(self): G = nx.generators.complete_graph(2) assert_false(nx.is_directed_acyclic_graph(G)) assert_false(nx.is_directed_acyclic_graph(G.to_directed())) assert_false(nx.is_directed_acyclic_graph(nx.Graph([(3, 4), (4, 5)]))) assert_true(nx.is_directed_acyclic_graph(nx.DiGraph([(3, 4), (4, 5)]))) def test_topological_sort2(self): DG=nx.DiGraph({1:[2],2:[3],3:[4], 4:[5],5:[1],11:[12], 12:[13],13:[14],14:[15]}) assert_raises(nx.NetworkXUnfeasible, nx.topological_sort, DG) 
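# both the iterative and recursive sorts must refuse the graph while the
# directed cycle 1->2->3->4->5->1 is present; removing edge (1,2) below
# breaks that cycle and makes a topological ordering possible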
assert_raises(nx.NetworkXUnfeasible, nx.topological_sort_recursive, DG) assert_false(nx.is_directed_acyclic_graph(DG)) DG.remove_edge(1,2) assert_equal(nx.topological_sort_recursive(DG), [11, 12, 13, 14, 15, 2, 3, 4, 5, 1]) assert_equal(nx.topological_sort(DG), [11, 12, 13, 14, 15, 2, 3, 4, 5, 1]) assert_true(nx.is_directed_acyclic_graph(DG)) def test_topological_sort3(self): DG=nx.DiGraph() DG.add_edges_from([(1,i) for i in range(2,5)]) DG.add_edges_from([(2,i) for i in range(5,9)]) DG.add_edges_from([(6,i) for i in range(9,12)]) DG.add_edges_from([(4,i) for i in range(12,15)]) assert_equal(nx.topological_sort_recursive(DG), [1, 4, 14, 13, 12, 3, 2, 7, 6, 11, 10, 9, 5, 8]) assert_equal(nx.topological_sort(DG), [1, 2, 8, 5, 6, 9, 10, 11, 7, 3, 4, 12, 13, 14]) DG.add_edge(14,1) assert_raises(nx.NetworkXUnfeasible, nx.topological_sort, DG) assert_raises(nx.NetworkXUnfeasible, nx.topological_sort_recursive, DG) def test_topological_sort4(self): G=nx.Graph() G.add_edge(1,2) assert_raises(nx.NetworkXError, nx.topological_sort, G) assert_raises(nx.NetworkXError, nx.topological_sort_recursive, G) def test_topological_sort5(self): G=nx.DiGraph() G.add_edge(0,1) assert_equal(nx.topological_sort_recursive(G), [0,1]) assert_equal(nx.topological_sort(G), [0,1]) def test_nbunch_argument(self): G=nx.DiGraph() G.add_edges_from([(1,2), (2,3), (1,4), (1,5), (2,6)]) assert_equal(nx.topological_sort(G), [1, 2, 3, 6, 4, 5]) assert_equal(nx.topological_sort_recursive(G), [1, 5, 4, 2, 6, 3]) assert_equal(nx.topological_sort(G,[1]), [1, 2, 3, 6, 4, 5]) assert_equal(nx.topological_sort_recursive(G,[1]), [1, 5, 4, 2, 6, 3]) assert_equal(nx.topological_sort(G,[5]), [5]) assert_equal(nx.topological_sort_recursive(G,[5]), [5]) def test_ancestors(self): G=nx.DiGraph() ancestors = nx.algorithms.dag.ancestors G.add_edges_from([ (1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) assert_equal(ancestors(G, 6), set([1, 2, 4, 5])) assert_equal(ancestors(G, 3), set([1, 4])) assert_equal(ancestors(G, 1), set()) assert_raises(nx.NetworkXError, ancestors, G, 8) def test_descendants(self): G=nx.DiGraph() descendants = nx.algorithms.dag.descendants G.add_edges_from([ (1, 2), (1, 3), (4, 2), (4, 3), (4, 5), (2, 6), (5, 6)]) assert_equal(descendants(G, 1), set([2, 3, 6])) assert_equal(descendants(G, 4), set([2, 3, 5, 6])) assert_equal(descendants(G, 3), set()) assert_raises(nx.NetworkXError, descendants, G, 8) def test_is_aperiodic_cycle(): G=nx.DiGraph() G.add_cycle([1,2,3,4]) assert_false(nx.is_aperiodic(G)) def test_is_aperiodic_cycle2(): G=nx.DiGraph() G.add_cycle([1,2,3,4]) G.add_cycle([3,4,5,6,7]) assert_true(nx.is_aperiodic(G)) def test_is_aperiodic_cycle3(): G=nx.DiGraph() G.add_cycle([1,2,3,4]) G.add_cycle([3,4,5,6]) assert_false(nx.is_aperiodic(G)) def test_is_aperiodic_cycle4(): G = nx.DiGraph() G.add_cycle([1,2,3,4]) G.add_edge(1,3) assert_true(nx.is_aperiodic(G)) def test_is_aperiodic_selfloop(): G = nx.DiGraph() G.add_cycle([1,2,3,4]) G.add_edge(1,1) assert_true(nx.is_aperiodic(G)) def test_is_aperiodic_raise(): G = nx.Graph() assert_raises(nx.NetworkXError, nx.is_aperiodic, G) def test_is_aperiodic_bipartite(): #Bipartite graph G = nx.DiGraph(nx.davis_southern_women_graph()) assert_false(nx.is_aperiodic(G)) def test_is_aperiodic_rary_tree(): G = nx.full_rary_tree(3,27,create_using=nx.DiGraph()) assert_false(nx.is_aperiodic(G)) def test_is_aperiodic_disconnected(): #disconnected graph G = nx.DiGraph() G.add_cycle([1,2,3,4]) G.add_cycle([5,6,7,8]) assert_false(nx.is_aperiodic(G)) G.add_edge(1,3) G.add_edge(5,7) 
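    # the chords (1,3) and (5,7) add 3-cycles alongside the existing 4-cycles,
    # so each component now has cycle lengths with gcd(4,3)=1 and is aperiodic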
assert_true(nx.is_aperiodic(G)) def test_is_aperiodic_disconnected2(): G = nx.DiGraph() G.add_cycle([0,1,2]) G.add_edge(3,3) assert_false(nx.is_aperiodic(G)) networkx-1.8.1/networkx/algorithms/tests/test_boundary.py0000664000175000017500000001052712177456333023732 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx import convert_node_labels_to_integers as cnlti class TestBoundary: def setUp(self): self.null=nx.null_graph() self.P10=cnlti(nx.path_graph(10),first_label=1) self.K10=cnlti(nx.complete_graph(10),first_label=1) def test_null_node_boundary(self): """null graph has empty node boundaries""" null=self.null assert_equal(nx.node_boundary(null,[]),[]) assert_equal(nx.node_boundary(null,[],[]),[]) assert_equal(nx.node_boundary(null,[1,2,3]),[]) assert_equal(nx.node_boundary(null,[1,2,3],[4,5,6]),[]) assert_equal(nx.node_boundary(null,[1,2,3],[3,4,5]),[]) def test_null_edge_boundary(self): """null graph has empty edge boundaries""" null=self.null assert_equal(nx.edge_boundary(null,[]),[]) assert_equal(nx.edge_boundary(null,[],[]),[]) assert_equal(nx.edge_boundary(null,[1,2,3]),[]) assert_equal(nx.edge_boundary(null,[1,2,3],[4,5,6]),[]) assert_equal(nx.edge_boundary(null,[1,2,3],[3,4,5]),[]) def test_path_node_boundary(self): """Check node boundaries in path graph.""" P10=self.P10 assert_equal(nx.node_boundary(P10,[]),[]) assert_equal(nx.node_boundary(P10,[],[]),[]) assert_equal(nx.node_boundary(P10,[1,2,3]),[4]) assert_equal(sorted(nx.node_boundary(P10,[4,5,6])),[3, 7]) assert_equal(sorted(nx.node_boundary(P10,[3,4,5,6,7])),[2, 8]) assert_equal(nx.node_boundary(P10,[8,9,10]),[7]) assert_equal(sorted(nx.node_boundary(P10,[4,5,6],[9,10])),[]) def test_path_edge_boundary(self): """Check edge boundaries in path graph.""" P10=self.P10 assert_equal(nx.edge_boundary(P10,[]),[]) assert_equal(nx.edge_boundary(P10,[],[]),[]) assert_equal(nx.edge_boundary(P10,[1,2,3]),[(3, 4)]) assert_equal(sorted(nx.edge_boundary(P10,[4,5,6])),[(4, 3), (6, 7)]) assert_equal(sorted(nx.edge_boundary(P10,[3,4,5,6,7])),[(3, 2), (7, 8)]) assert_equal(nx.edge_boundary(P10,[8,9,10]),[(8, 7)]) assert_equal(sorted(nx.edge_boundary(P10,[4,5,6],[9,10])),[]) assert_equal(nx.edge_boundary(P10,[1,2,3],[3,4,5]) ,[(2, 3), (3, 4)]) def test_k10_node_boundary(self): """Check node boundaries in K10""" K10=self.K10 assert_equal(nx.node_boundary(K10,[]),[]) assert_equal(nx.node_boundary(K10,[],[]),[]) assert_equal(sorted(nx.node_boundary(K10,[1,2,3])), [4, 5, 6, 7, 8, 9, 10]) assert_equal(sorted(nx.node_boundary(K10,[4,5,6])), [1, 2, 3, 7, 8, 9, 10]) assert_equal(sorted(nx.node_boundary(K10,[3,4,5,6,7])), [1, 2, 8, 9, 10]) assert_equal(nx.node_boundary(K10,[4,5,6],[]),[]) assert_equal(nx.node_boundary(K10,K10),[]) assert_equal(nx.node_boundary(K10,[1,2,3],[3,4,5]),[4, 5]) def test_k10_edge_boundary(self): """Check edge boundaries in K10""" K10=self.K10 assert_equal(nx.edge_boundary(K10,[]),[]) assert_equal(nx.edge_boundary(K10,[],[]),[]) assert_equal(len(nx.edge_boundary(K10,[1,2,3])),21) assert_equal(len(nx.edge_boundary(K10,[4,5,6,7])),24) assert_equal(len(nx.edge_boundary(K10,[3,4,5,6,7])),25) assert_equal(len(nx.edge_boundary(K10,[8,9,10])),21) assert_equal(sorted(nx.edge_boundary(K10,[4,5,6],[9,10])), [(4, 9), (4, 10), (5, 9), (5, 10), (6, 9), (6, 10)]) assert_equal(nx.edge_boundary(K10,[1,2,3],[3,4,5]), [(1, 3), (1, 4), (1, 5), (2, 3), (2, 4), (2, 5), (3, 4), (3, 5)]) def test_petersen(self): """Check boundaries in the petersen graph cheeger(G,k)=min(|bdy(S)|/|S| for 
|S|=k, 0 0 or len(set([u]) & vset) > 0, \ "not a proper matching!") eq_(1, len(matching), "matching not length 1!") graph = nx.Graph() graph.add_edge(1, 2) graph.add_edge(1, 5) graph.add_edge(2, 3) graph.add_edge(2, 5) graph.add_edge(3, 4) graph.add_edge(3, 6) graph.add_edge(5, 6) matching = nx.maximal_matching(graph) vset = set(u for u, v in matching) vset = vset | set(v for u, v in matching) for edge in graph.edges_iter(): u, v = edge ok_(len(set([v]) & vset) > 0 or len(set([u]) & vset) > 0, \ "not a proper matching!") networkx-1.8.1/networkx/algorithms/operators/0000775000175000017500000000000012177457361021347 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/operators/unary.py0000664000175000017500000000336112177456333023060 0ustar aricaric00000000000000"""Unary operations on graphs""" # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from networkx.utils import is_string_like __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) __all__ = ['complement', 'reverse'] def complement(G, name=None): """Return the graph complement of G. Parameters ---------- G : graph A NetworkX graph name : string Specify name for new graph Returns ------- GC : A new graph. Notes ------ Note that complement() does not create self-loops and also does not produce parallel edges for MultiGraphs. Graph, node, and edge data are not propagated to the new graph. """ if name is None: name="complement(%s)"%(G.name) R=G.__class__() R.name=name R.add_nodes_from(G) R.add_edges_from( ((n,n2) for n,nbrs in G.adjacency_iter() for n2 in G if n2 not in nbrs if n != n2) ) return R def reverse(G, copy=True): """Return the reverse directed graph of G. Parameters ---------- G : directed graph A NetworkX directed graph copy : bool If True, then a new graph is returned. If False, then the graph is reversed in place. Returns ------- H : directed graph The reversed G. """ if not G.is_directed(): raise nx.NetworkXError("Cannot reverse an undirected graph.") else: return G.reverse(copy=copy) networkx-1.8.1/networkx/algorithms/operators/product.py0000664000175000017500000002461112177456333023403 0ustar aricaric00000000000000""" Graph products. """ # Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
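# Example usage (for illustration): each product function below takes two
# NetworkX graphs and returns a graph on the node set V(G) x V(H), e.g.
#
#     import networkx as nx
#     P = nx.cartesian_product(nx.path_graph(2), nx.path_graph(2))
#     sorted(P.nodes())   # -> [(0, 0), (0, 1), (1, 0), (1, 1)]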
import networkx as nx from itertools import product __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)' 'Ben Edwards(bedwards@cs.unm.edu)']) __all__ = ['tensor_product','cartesian_product', 'lexicographic_product', 'strong_product'] def _dict_product(d1,d2): return dict((k,(d1.get(k),d2.get(k))) for k in set(d1)|set(d2)) # Generators for producting graph products def _node_product(G,H): for u,v in product(G, H): yield ((u,v), _dict_product(G.node[u], H.node[v])) def _directed_edges_cross_edges(G,H): if not G.is_multigraph() and not H.is_multigraph(): for u,v,c in G.edges_iter(data=True): for x,y,d in H.edges_iter(data=True): yield (u,x),(v,y),_dict_product(c,d) if not G.is_multigraph() and H.is_multigraph(): for u,v,c in G.edges_iter(data=True): for x,y,k,d in H.edges_iter(data=True,keys=True): yield (u,x),(v,y),k,_dict_product(c,d) if G.is_multigraph() and not H.is_multigraph(): for u,v,k,c in G.edges_iter(data=True,keys=True): for x,y,d in H.edges_iter(data=True): yield (u,x),(v,y),k,_dict_product(c,d) if G.is_multigraph() and H.is_multigraph(): for u,v,j,c in G.edges_iter(data=True,keys=True): for x,y,k,d in H.edges_iter(data=True,keys=True): yield (u,x),(v,y),(j,k),_dict_product(c,d) def _undirected_edges_cross_edges(G,H): if not G.is_multigraph() and not H.is_multigraph(): for u,v,c in G.edges_iter(data=True): for x,y,d in H.edges_iter(data=True): yield (v,x),(u,y),_dict_product(c,d) if not G.is_multigraph() and H.is_multigraph(): for u,v,c in G.edges_iter(data=True): for x,y,k,d in H.edges_iter(data=True,keys=True): yield (v,x),(u,y),k,_dict_product(c,d) if G.is_multigraph() and not H.is_multigraph(): for u,v,k,c in G.edges_iter(data=True,keys=True): for x,y,d in H.edges_iter(data=True): yield (v,x),(u,y),k,_dict_product(c,d) if G.is_multigraph() and H.is_multigraph(): for u,v,j,c in G.edges_iter(data=True,keys=True): for x,y,k,d in H.edges_iter(data=True,keys=True): yield (v,x),(u,y),(j,k),_dict_product(c,d) def _edges_cross_nodes(G,H): if G.is_multigraph(): for u,v,k,d in G.edges_iter(data=True,keys=True): for x in H: yield (u,x),(v,x),k,d else: for u,v,d in G.edges_iter(data=True): for x in H: if H.is_multigraph(): yield (u,x),(v,x),None,d else: yield (u,x),(v,x),d def _nodes_cross_edges(G,H): if H.is_multigraph(): for x in G: for u,v,k,d in H.edges_iter(data=True,keys=True): yield (x,u),(x,v),k,d else: for x in G: for u,v,d in H.edges_iter(data=True): if G.is_multigraph(): yield (x,u),(x,v),None,d else: yield (x,u),(x,v),d def _edges_cross_nodes_and_nodes(G,H): if G.is_multigraph(): for u,v,k,d in G.edges_iter(data=True,keys=True): for x in H: for y in H: yield (u,x),(v,y),k,d else: for u,v,d in G.edges_iter(data=True): for x in H: for y in H: if H.is_multigraph(): yield (u,x),(v,y),None,d else: yield (u,x),(v,y),d def _init_product_graph(G,H): if not G.is_directed() == H.is_directed(): raise nx.NetworkXError("G and H must be both directed or", "both undirected") if G.is_multigraph() or H.is_multigraph(): GH = nx.MultiGraph() else: GH = nx.Graph() if G.is_directed(): GH = GH.to_directed() return GH def tensor_product(G,H): r"""Return the tensor product of G and H. The tensor product P of the graphs G and H has a node set that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. P has an edge ((u,v),(x,y)) if and only if (u,v) is an edge in G and (x,y) is an edge in H. Sometimes referred to as the categorical product. Parameters ---------- G, H: graphs Networkx graphs. 
Returns ------- P: NetworkX graph The tensor product of G and H. P will be a multi-graph if either G or H is a multi-graph. Will be a directed if G and H are directed, and undirected if G and H are undirected. Raises ------ NetworkXError If G and H are not both directed or both undirected. Notes ----- Node attributes in P are two-tuple of the G and H node attributes. Missing attributes are assigned None. For example >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0,a1=True) >>> H.add_node('a',a2='Spam') >>> P = nx.tensor_product(G,H) >>> P.nodes() [(0, 'a')] Edge attributes and edge keys (for multigraphs) are also copied to the new product graph """ GH = _init_product_graph(G,H) GH.add_nodes_from(_node_product(G,H)) GH.add_edges_from(_directed_edges_cross_edges(G,H)) if not GH.is_directed(): GH.add_edges_from(_undirected_edges_cross_edges(G,H)) GH.name = "Tensor product("+G.name+","+H.name+")" return GH def cartesian_product(G,H): """Return the Cartesian product of G and H. The tensor product P of the graphs G and H has a node set that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. P has an edge ((u,v),(x,y)) if and only if (u,v) is an edge in G and x==y or and (x,y) is an edge in H and u==v. and (x,y) is an edge in H. Parameters ---------- G, H: graphs Networkx graphs. Returns ------- P: NetworkX graph The Cartesian product of G and H. P will be a multi-graph if either G or H is a multi-graph. Will be a directed if G and H are directed, and undirected if G and H are undirected. Raises ------ NetworkXError If G and H are not both directed or both undirected. Notes ----- Node attributes in P are two-tuple of the G and H node attributes. Missing attributes are assigned None. For example >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0,a1=True) >>> H.add_node('a',a2='Spam') >>> P = nx.cartesian_product(G,H) >>> P.nodes() [(0, 'a')] Edge attributes and edge keys (for multigraphs) are also copied to the new product graph """ if not G.is_directed() == H.is_directed(): raise nx.NetworkXError("G and H must be both directed or", "both undirected") GH = _init_product_graph(G,H) GH.add_nodes_from(_node_product(G,H)) GH.add_edges_from(_edges_cross_nodes(G,H)) GH.add_edges_from(_nodes_cross_edges(G,H)) GH.name = "Cartesian product("+G.name+","+H.name+")" return GH def lexicographic_product(G,H): """Return the lexicographic product of G and H. The lexicographical product P of the graphs G and H has a node set that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. P has an edge ((u,v),(x,y)) if and only if (u,v) is an edge in G or u==v and (x,y) is an edge in H. Parameters ---------- G, H: graphs Networkx graphs. Returns ------- P: NetworkX graph The Cartesian product of G and H. P will be a multi-graph if either G or H is a multi-graph. Will be a directed if G and H are directed, and undirected if G and H are undirected. Raises ------ NetworkXError If G and H are not both directed or both undirected. Notes ----- Node attributes in P are two-tuple of the G and H node attributes. Missing attributes are assigned None. 
For example >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0,a1=True) >>> H.add_node('a',a2='Spam') >>> P = nx.lexicographic_product(G,H) >>> P.nodes() [(0, 'a')] Edge attributes and edge keys (for multigraphs) are also copied to the new product graph """ GH = _init_product_graph(G,H) GH.add_nodes_from(_node_product(G,H)) # Edges in G regardless of H designation GH.add_edges_from(_edges_cross_nodes_and_nodes(G,H)) # For each x in G, only if there is an edge in H GH.add_edges_from(_nodes_cross_edges(G,H)) GH.name = "Lexicographic product("+G.name+","+H.name+")" return GH def strong_product(G,H): """Return the strong product of G and H. The strong product P of the graphs G and H has a node set that is the Cartesian product of the node sets, $V(P)=V(G) \times V(H)$. P has an edge ((u,v),(x,y)) if and only if u==v and (x,y) is an edge in H, or x==y and (u,v) is an edge in G, or (u,v) is an edge in G and (x,y) is an edge in H. Parameters ---------- G, H: graphs Networkx graphs. Returns ------- P: NetworkX graph The Cartesian product of G and H. P will be a multi-graph if either G or H is a multi-graph. Will be a directed if G and H are directed, and undirected if G and H are undirected. Raises ------ NetworkXError If G and H are not both directed or both undirected. Notes ----- Node attributes in P are two-tuple of the G and H node attributes. Missing attributes are assigned None. For example >>> G = nx.Graph() >>> H = nx.Graph() >>> G.add_node(0,a1=True) >>> H.add_node('a',a2='Spam') >>> P = nx.strong_product(G,H) >>> P.nodes() [(0, 'a')] Edge attributes and edge keys (for multigraphs) are also copied to the new product graph """ GH = _init_product_graph(G,H) GH.add_nodes_from(_node_product(G,H)) GH.add_edges_from(_nodes_cross_edges(G,H)) GH.add_edges_from(_edges_cross_nodes(G,H)) GH.add_edges_from(_directed_edges_cross_edges(G,H)) if not GH.is_directed(): GH.add_edges_from(_undirected_edges_cross_edges(G,H)) GH.name = "Strong product("+G.name+","+H.name+")" return GH networkx-1.8.1/networkx/algorithms/operators/binary.py0000664000175000017500000002174012177456333023207 0ustar aricaric00000000000000""" Operations on graphs including union, intersection, difference. """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from networkx.utils import is_string_like __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) __all__ = ['union', 'compose', 'disjoint_union', 'intersection', 'difference', 'symmetric_difference'] def union(G, H, rename=(None, None), name=None): """ Return the union of graphs G and H. Graphs G and H must be disjoint, otherwise an exception is raised. Parameters ---------- G,H : graph A NetworkX graph create_using : NetworkX graph Use specified graph for result. Otherwise rename : bool , default=(None, None) Node names of G and H can be changed by specifying the tuple rename=('G-','H-') (for example). Node "u" in G is then renamed "G-u" and "v" in H is renamed "H-v". name : string Specify the name for the union graph Returns ------- U : A union graph with the same type as G. Notes ----- To force a disjoint union with node relabeling, use disjoint_union(G,H) or convert_node_labels_to integers(). Graph, edge, and node attributes are propagated from G and H to the union graph. If a graph attribute is present in both G and H the value from H is used. 
See Also -------- disjoint_union """ # Union is the same type as G R = G.__class__() if name is None: name = "union( %s, %s )"%(G.name,H.name) R.name = name # rename graph to obtain disjoint node labels def add_prefix(graph, prefix): if prefix is None: return graph def label(x): if is_string_like(x): name=prefix+x else: name=prefix+repr(x) return name return nx.relabel_nodes(graph, label) G = add_prefix(G,rename[0]) H = add_prefix(H,rename[1]) if set(G) & set(H): raise nx.NetworkXError('The node sets of G and H are not disjoint.', 'Use appropriate rename=(Gprefix,Hprefix)' 'or use disjoint_union(G,H).') if G.is_multigraph(): G_edges = G.edges_iter(keys=True, data=True) else: G_edges = G.edges_iter(data=True) if H.is_multigraph(): H_edges = H.edges_iter(keys=True, data=True) else: H_edges = H.edges_iter(data=True) # add nodes R.add_nodes_from(G) R.add_edges_from(G_edges) # add edges R.add_nodes_from(H) R.add_edges_from(H_edges) # add node attributes R.node.update(G.node) R.node.update(H.node) # add graph attributes, H attributes take precedent over G attributes R.graph.update(G.graph) R.graph.update(H.graph) return R def disjoint_union(G,H): """ Return the disjoint union of graphs G and H. This algorithm forces distinct integer node labels. Parameters ---------- G,H : graph A NetworkX graph Returns ------- U : A union graph with the same type as G. Notes ----- A new graph is created, of the same class as G. It is recommended that G and H be either both directed or both undirected. The nodes of G are relabeled 0 to len(G)-1, and the nodes of H are relabeled len(G) to len(G)+len(H)-1. Graph, edge, and node attributes are propagated from G and H to the union graph. If a graph attribute is present in both G and H the value from H is used. """ R1=nx.convert_node_labels_to_integers(G) R2=nx.convert_node_labels_to_integers(H,first_label=len(R1)) R=union(R1,R2) R.name="disjoint_union( %s, %s )"%(G.name,H.name) R.graph.update(G.graph) R.graph.update(H.graph) return R def intersection(G, H): """Return a new graph that contains only the edges that exist in both G and H. The node sets of H and G must be the same. Parameters ---------- G,H : graph A NetworkX graph. G and H must have the same node sets. Returns ------- GH : A new graph with the same type as G. Notes ----- Attributes from the graph, nodes, and edges are not copied to the new graph. If you want a new graph of the intersection of G and H with the attributes (including edge data) from G use remove_nodes_from() as follows >>> G=nx.path_graph(3) >>> H=nx.path_graph(5) >>> R=G.copy() >>> R.remove_nodes_from(n for n in G if n not in H) """ # create new graph R=nx.create_empty_copy(G) R.name="Intersection of (%s and %s)"%(G.name, H.name) if set(G)!=set(H): raise nx.NetworkXError("Node sets of graphs are not equal") if G.number_of_edges()<=H.number_of_edges(): if G.is_multigraph(): edges=G.edges_iter(keys=True) else: edges=G.edges_iter() for e in edges: if H.has_edge(*e): R.add_edge(*e) else: if H.is_multigraph(): edges=H.edges_iter(keys=True) else: edges=H.edges_iter() for e in edges: if G.has_edge(*e): R.add_edge(*e) return R def difference(G, H): """Return a new graph that contains the edges that exist in G but not in H. The node sets of H and G must be the same. Parameters ---------- G,H : graph A NetworkX graph. G and H must have the same node sets. Returns ------- D : A new graph with the same type as G. Notes ----- Attributes from the graph, nodes, and edges are not copied to the new graph. 
If you want a new graph of the difference of G and H with with the attributes (including edge data) from G use remove_nodes_from() as follows: >>> G=nx.path_graph(3) >>> H=nx.path_graph(5) >>> R=G.copy() >>> R.remove_nodes_from(n for n in G if n in H) """ # create new graph R=nx.create_empty_copy(G) R.name="Difference of (%s and %s)"%(G.name, H.name) if set(G)!=set(H): raise nx.NetworkXError("Node sets of graphs not equal") if G.is_multigraph(): edges=G.edges_iter(keys=True) else: edges=G.edges_iter() for e in edges: if not H.has_edge(*e): R.add_edge(*e) return R def symmetric_difference(G, H): """Return new graph with edges that exist in either G or H but not both. The node sets of H and G must be the same. Parameters ---------- G,H : graph A NetworkX graph. G and H must have the same node sets. Returns ------- D : A new graph with the same type as G. Notes ----- Attributes from the graph, nodes, and edges are not copied to the new graph. """ # create new graph R=nx.create_empty_copy(G) R.name="Symmetric difference of (%s and %s)"%(G.name, H.name) if set(G)!=set(H): raise nx.NetworkXError("Node sets of graphs not equal") gnodes=set(G) # set of nodes in G hnodes=set(H) # set of nodes in H nodes=gnodes.symmetric_difference(hnodes) R.add_nodes_from(nodes) if G.is_multigraph(): edges=G.edges_iter(keys=True) else: edges=G.edges_iter() # we could copy the data here but then this function doesn't # match intersection and difference for e in edges: if not H.has_edge(*e): R.add_edge(*e) if H.is_multigraph(): edges=H.edges_iter(keys=True) else: edges=H.edges_iter() for e in edges: if not G.has_edge(*e): R.add_edge(*e) return R def compose(G, H, name=None): """Return a new graph of G composed with H. Composition is the simple union of the node sets and edge sets. The node sets of G and H need not be disjoint. Parameters ---------- G,H : graph A NetworkX graph name : string Specify name for new graph Returns ------- C: A new graph with the same type as G Notes ----- It is recommended that G and H be either both directed or both undirected. Attributes from H take precedent over attributes from G. 
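    A minimal illustration (nodes with the same label are merged, edge sets
    are unioned):

    Examples
    --------
    >>> G = nx.Graph([(0, 1)])
    >>> H = nx.Graph([(1, 2)])
    >>> C = nx.compose(G, H)
    >>> sorted(C.nodes())
    [0, 1, 2]
    >>> C.number_of_edges()
    2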
""" if name is None: name="compose( %s, %s )"%(G.name,H.name) R=G.__class__() R.name=name R.add_nodes_from(H.nodes()) R.add_nodes_from(G.nodes()) if H.is_multigraph(): R.add_edges_from(H.edges_iter(keys=True,data=True)) else: R.add_edges_from(H.edges_iter(data=True)) if G.is_multigraph(): R.add_edges_from(G.edges_iter(keys=True,data=True)) else: R.add_edges_from(G.edges_iter(data=True)) # add node attributes, H attributes take precedent over G attributes R.node.update(G.node) R.node.update(H.node) # add graph attributes, H attributes take precedent over G attributes R.graph.update(G.graph) R.graph.update(H.graph) return R networkx-1.8.1/networkx/algorithms/operators/__init__.py0000664000175000017500000000031112177456333023451 0ustar aricaric00000000000000from networkx.algorithms.operators.all import * from networkx.algorithms.operators.binary import * from networkx.algorithms.operators.product import * from networkx.algorithms.operators.unary import * networkx-1.8.1/networkx/algorithms/operators/tests/0000775000175000017500000000000012177457361022511 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/operators/tests/test_binary.py0000664000175000017500000001724212177456333025412 0ustar aricaric00000000000000from nose.tools import * import networkx as nx from networkx import * from networkx.testing import * def test_union_attributes(): g = nx.Graph() g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) g.graph['name'] = 'g' h = g.copy() h.graph['name'] = 'h' h.graph['attr'] = 'attr' h.node[0]['x'] = 7 gh = nx.union(g, h, rename=('g', 'h')) assert_equal( set(gh.nodes()) , set(['h0', 'h1', 'g0', 'g1']) ) for n in gh: graph, node = n assert_equal( gh.node[n], eval(graph).node[int(node)] ) assert_equal(gh.graph['attr'],'attr') assert_equal(gh.graph['name'],'h') # h graph attributes take precendent def test_intersection(): G=nx.Graph() H=nx.Graph() G.add_nodes_from([1,2,3,4]) G.add_edge(1,2) G.add_edge(2,3) H.add_nodes_from([1,2,3,4]) H.add_edge(2,3) H.add_edge(3,4) I=nx.intersection(G,H) assert_equal( set(I.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(I.edges()) , [(2,3)] ) def test_intersection_attributes(): g = nx.Graph() g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) g.graph['name'] = 'g' h = g.copy() h.graph['name'] = 'h' h.graph['attr'] = 'attr' h.node[0]['x'] = 7 gh = nx.intersection(g, h) assert_equal( set(gh.nodes()) , set(g.nodes()) ) assert_equal( set(gh.nodes()) , set(h.nodes()) ) assert_equal( sorted(gh.edges()) , sorted(g.edges()) ) h.remove_node(0) assert_raises(nx.NetworkXError, nx.intersection, g, h) def test_intersection_multigraph_attributes(): g = nx.MultiGraph() g.add_edge(0, 1, key=0) g.add_edge(0, 1, key=1) g.add_edge(0, 1, key=2) h = nx.MultiGraph() h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.intersection(g, h) assert_equal( set(gh.nodes()) , set(g.nodes()) ) assert_equal( set(gh.nodes()) , set(h.nodes()) ) assert_equal( sorted(gh.edges()) , [(0,1)] ) assert_equal( sorted(gh.edges(keys=True)) , [(0,1,0)] ) def test_difference(): G=nx.Graph() H=nx.Graph() G.add_nodes_from([1,2,3,4]) G.add_edge(1,2) G.add_edge(2,3) H.add_nodes_from([1,2,3,4]) H.add_edge(2,3) H.add_edge(3,4) D=nx.difference(G,H) assert_equal( set(D.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(D.edges()) , [(1,2)] ) D=nx.difference(H,G) assert_equal( set(D.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(D.edges()) , [(3,4)] ) D=nx.symmetric_difference(G,H) assert_equal( set(D.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(D.edges()) , [(1,2),(3,4)] 
) def test_difference2(): G=nx.Graph() H=nx.Graph() G.add_nodes_from([1,2,3,4]) H.add_nodes_from([1,2,3,4]) G.add_edge(1,2) H.add_edge(1,2) G.add_edge(2,3) D=nx.difference(G,H) assert_equal( set(D.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(D.edges()) , [(2,3)] ) D=nx.difference(H,G) assert_equal( set(D.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(D.edges()) , [] ) H.add_edge(3,4) D=nx.difference(H,G) assert_equal( set(D.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(D.edges()) , [(3,4)] ) def test_difference_attributes(): g = nx.Graph() g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) g.graph['name'] = 'g' h = g.copy() h.graph['name'] = 'h' h.graph['attr'] = 'attr' h.node[0]['x'] = 7 gh = nx.difference(g, h) assert_equal( set(gh.nodes()) , set(g.nodes()) ) assert_equal( set(gh.nodes()) , set(h.nodes()) ) assert_equal( sorted(gh.edges()) , []) h.remove_node(0) assert_raises(nx.NetworkXError, nx.intersection, g, h) def test_difference_multigraph_attributes(): g = nx.MultiGraph() g.add_edge(0, 1, key=0) g.add_edge(0, 1, key=1) g.add_edge(0, 1, key=2) h = nx.MultiGraph() h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.difference(g, h) assert_equal( set(gh.nodes()) , set(g.nodes()) ) assert_equal( set(gh.nodes()) , set(h.nodes()) ) assert_equal( sorted(gh.edges()) , [(0,1),(0,1)] ) assert_equal( sorted(gh.edges(keys=True)) , [(0,1,1),(0,1,2)] ) @raises(nx.NetworkXError) def test_difference_raise(): G = nx.path_graph(4) H = nx.path_graph(3) GH = nx.difference(G, H) def test_symmetric_difference_multigraph(): g = nx.MultiGraph() g.add_edge(0, 1, key=0) g.add_edge(0, 1, key=1) g.add_edge(0, 1, key=2) h = nx.MultiGraph() h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.symmetric_difference(g, h) assert_equal( set(gh.nodes()) , set(g.nodes()) ) assert_equal( set(gh.nodes()) , set(h.nodes()) ) assert_equal( sorted(gh.edges()) , 3*[(0,1)] ) assert_equal( sorted(sorted(e) for e in gh.edges(keys=True)), [[0,1,1],[0,1,2],[0,1,3]] ) @raises(nx.NetworkXError) def test_symmetric_difference_raise(): G = nx.path_graph(4) H = nx.path_graph(3) GH = nx.symmetric_difference(G, H) def test_union_and_compose(): K3=complete_graph(3) P3=path_graph(3) G1=nx.DiGraph() G1.add_edge('A','B') G1.add_edge('A','C') G1.add_edge('A','D') G2=nx.DiGraph() G2.add_edge('1','2') G2.add_edge('1','3') G2.add_edge('1','4') G=union(G1,G2) H=compose(G1,G2) assert_edges_equal(G.edges(),H.edges()) assert_false(G.has_edge('A',1)) assert_raises(nx.NetworkXError, nx.union, K3, P3) H1=union(H,G1,rename=('H','G1')) assert_equal(sorted(H1.nodes()), ['G1A', 'G1B', 'G1C', 'G1D', 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) H2=union(H,G2,rename=("H","")) assert_equal(sorted(H2.nodes()), ['1', '2', '3', '4', 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) assert_false(H1.has_edge('NB','NA')) G=compose(G,G) assert_edges_equal(G.edges(),H.edges()) G2=union(G2,G2,rename=('','copy')) assert_equal(sorted(G2.nodes()), ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4']) assert_equal(G2.neighbors('copy4'),[]) assert_equal(sorted(G2.neighbors('copy1')),['copy2', 'copy3', 'copy4']) assert_equal(len(G),8) assert_equal(number_of_edges(G),6) E=disjoint_union(G,G) assert_equal(len(E),16) assert_equal(number_of_edges(E),12) E=disjoint_union(G1,G2) assert_equal(sorted(E.nodes()),[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) def test_union_multigraph(): G=nx.MultiGraph() G.add_edge(1,2,key=0) G.add_edge(1,2,key=1) H=nx.MultiGraph() H.add_edge(3,4,key=0) H.add_edge(3,4,key=1) GH=nx.union(G,H) assert_equal( set(GH) , 
set(G)|set(H)) assert_equal( set(GH.edges(keys=True)) , set(G.edges(keys=True))|set(H.edges(keys=True))) def test_disjoint_union_multigraph(): G=nx.MultiGraph() G.add_edge(0,1,key=0) G.add_edge(0,1,key=1) H=nx.MultiGraph() H.add_edge(2,3,key=0) H.add_edge(2,3,key=1) GH=nx.disjoint_union(G,H) assert_equal( set(GH) , set(G)|set(H)) assert_equal( set(GH.edges(keys=True)) , set(G.edges(keys=True))|set(H.edges(keys=True))) def test_compose_multigraph(): G=nx.MultiGraph() G.add_edge(1,2,key=0) G.add_edge(1,2,key=1) H=nx.MultiGraph() H.add_edge(3,4,key=0) H.add_edge(3,4,key=1) GH=nx.compose(G,H) assert_equal( set(GH) , set(G)|set(H)) assert_equal( set(GH.edges(keys=True)) , set(G.edges(keys=True))|set(H.edges(keys=True))) H.add_edge(1,2,key=2) GH=nx.compose(G,H) assert_equal( set(GH) , set(G)|set(H)) assert_equal( set(GH.edges(keys=True)) , set(G.edges(keys=True))|set(H.edges(keys=True))) networkx-1.8.1/networkx/algorithms/operators/tests/test_unary.py0000664000175000017500000000246012177456333025260 0ustar aricaric00000000000000from nose.tools import * import networkx as nx from networkx import * def test_complement(): null=null_graph() empty1=empty_graph(1) empty10=empty_graph(10) K3=complete_graph(3) K5=complete_graph(5) K10=complete_graph(10) P2=path_graph(2) P3=path_graph(3) P5=path_graph(5) P10=path_graph(10) #complement of the complete graph is empty G=complement(K3) assert_true(is_isomorphic(G,empty_graph(3))) G=complement(K5) assert_true(is_isomorphic(G,empty_graph(5))) # for any G, G=complement(complement(G)) P3cc=complement(complement(P3)) assert_true(is_isomorphic(P3,P3cc)) nullcc=complement(complement(null)) assert_true(is_isomorphic(null,nullcc)) b=bull_graph() bcc=complement(complement(b)) assert_true(is_isomorphic(b,bcc)) def test_complement_2(): G1=nx.DiGraph() G1.add_edge('A','B') G1.add_edge('A','C') G1.add_edge('A','D') G1C=complement(G1) assert_equal(sorted(G1C.edges()), [('B', 'A'), ('B', 'C'), ('B', 'D'), ('C', 'A'), ('C', 'B'), ('C', 'D'), ('D', 'A'), ('D', 'B'), ('D', 'C')]) def test_reverse1(): # Other tests for reverse are done by the DiGraph and MultiDigraph. 
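    # reverse() is only defined for directed graphs; an undirected Graph must
    # trigger NetworkXError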
G1=nx.Graph() assert_raises(nx.NetworkXError, nx.reverse, G1) networkx-1.8.1/networkx/algorithms/operators/tests/test_all.py0000664000175000017500000001112312177456333024666 0ustar aricaric00000000000000from nose.tools import * import networkx as nx from networkx.testing import * def test_union_all_attributes(): g = nx.Graph() g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) g.graph['name'] = 'g' h = g.copy() h.graph['name'] = 'h' h.graph['attr'] = 'attr' h.node[0]['x'] = 7 j = g.copy() j.graph['name'] = 'j' j.graph['attr'] = 'attr' j.node[0]['x'] = 7 ghj = nx.union_all([g, h, j], rename=('g', 'h', 'j')) assert_equal( set(ghj.nodes()) , set(['h0', 'h1', 'g0', 'g1', 'j0', 'j1']) ) for n in ghj: graph, node = n assert_equal( ghj.node[n], eval(graph).node[int(node)] ) assert_equal(ghj.graph['attr'],'attr') assert_equal(ghj.graph['name'],'j') # j graph attributes take precendent def test_intersection_all(): G=nx.Graph() H=nx.Graph() R=nx.Graph() G.add_nodes_from([1,2,3,4]) G.add_edge(1,2) G.add_edge(2,3) H.add_nodes_from([1,2,3,4]) H.add_edge(2,3) H.add_edge(3,4) R.add_nodes_from([1,2,3,4]) R.add_edge(2,3) R.add_edge(4,1) I=nx.intersection_all([G,H,R]) assert_equal( set(I.nodes()) , set([1,2,3,4]) ) assert_equal( sorted(I.edges()) , [(2,3)] ) def test_intersection_all_attributes(): g = nx.Graph() g.add_node(0, x=4) g.add_node(1, x=5) g.add_edge(0, 1, size=5) g.graph['name'] = 'g' h = g.copy() h.graph['name'] = 'h' h.graph['attr'] = 'attr' h.node[0]['x'] = 7 gh = nx.intersection_all([g, h]) assert_equal( set(gh.nodes()) , set(g.nodes()) ) assert_equal( set(gh.nodes()) , set(h.nodes()) ) assert_equal( sorted(gh.edges()) , sorted(g.edges()) ) h.remove_node(0) assert_raises(nx.NetworkXError, nx.intersection, g, h) def test_intersection_all_multigraph_attributes(): g = nx.MultiGraph() g.add_edge(0, 1, key=0) g.add_edge(0, 1, key=1) g.add_edge(0, 1, key=2) h = nx.MultiGraph() h.add_edge(0, 1, key=0) h.add_edge(0, 1, key=3) gh = nx.intersection_all([g, h]) assert_equal( set(gh.nodes()) , set(g.nodes()) ) assert_equal( set(gh.nodes()) , set(h.nodes()) ) assert_equal( sorted(gh.edges()) , [(0,1)] ) assert_equal( sorted(gh.edges(keys=True)) , [(0,1,0)] ) def test_union_all_and_compose_all(): K3=nx.complete_graph(3) P3=nx.path_graph(3) G1=nx.DiGraph() G1.add_edge('A','B') G1.add_edge('A','C') G1.add_edge('A','D') G2=nx.DiGraph() G2.add_edge('1','2') G2.add_edge('1','3') G2.add_edge('1','4') G=nx.union_all([G1,G2]) H=nx.compose_all([G1,G2]) assert_edges_equal(G.edges(),H.edges()) assert_false(G.has_edge('A','1')) assert_raises(nx.NetworkXError, nx.union, K3, P3) H1=nx.union_all([H,G1],rename=('H','G1')) assert_equal(sorted(H1.nodes()), ['G1A', 'G1B', 'G1C', 'G1D', 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) H2=nx.union_all([H,G2],rename=("H","")) assert_equal(sorted(H2.nodes()), ['1', '2', '3', '4', 'H1', 'H2', 'H3', 'H4', 'HA', 'HB', 'HC', 'HD']) assert_false(H1.has_edge('NB','NA')) G=nx.compose_all([G,G]) assert_edges_equal(G.edges(),H.edges()) G2=nx.union_all([G2,G2],rename=('','copy')) assert_equal(sorted(G2.nodes()), ['1', '2', '3', '4', 'copy1', 'copy2', 'copy3', 'copy4']) assert_equal(G2.neighbors('copy4'),[]) assert_equal(sorted(G2.neighbors('copy1')),['copy2', 'copy3', 'copy4']) assert_equal(len(G),8) assert_equal(nx.number_of_edges(G),6) E=nx.disjoint_union_all([G,G]) assert_equal(len(E),16) assert_equal(nx.number_of_edges(E),12) E=nx.disjoint_union_all([G1,G2]) assert_equal(sorted(E.nodes()),[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) G1=nx.DiGraph() G1.add_edge('A','B') 
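    # together with G1 above, G2 and G3 give three single-edge digraphs for
    # checking that union_all applies a distinct rename prefix to each input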
G2=nx.DiGraph() G2.add_edge(1,2) G3=nx.DiGraph() G3.add_edge(11,22) G4=nx.union_all([G1,G2,G3],rename=("G1","G2","G3")) assert_equal(sorted(G4.nodes()), ['G1A', 'G1B', 'G21', 'G22', 'G311', 'G322']) def test_union_all_multigraph(): G=nx.MultiGraph() G.add_edge(1,2,key=0) G.add_edge(1,2,key=1) H=nx.MultiGraph() H.add_edge(3,4,key=0) H.add_edge(3,4,key=1) GH=nx.union_all([G,H]) assert_equal( set(GH) , set(G)|set(H)) assert_equal( set(GH.edges(keys=True)) , set(G.edges(keys=True))|set(H.edges(keys=True))) def test_input_output(): l = [nx.Graph([(1,2)]),nx.Graph([(3,4)])] U = nx.disjoint_union_all(l) assert_equal(len(l),2) C = nx.compose_all(l) assert_equal(len(l),2) l = [nx.Graph([(1,2)]),nx.Graph([(1,2)])] R = nx.intersection_all(l) assert_equal(len(l),2) networkx-1.8.1/networkx/algorithms/operators/tests/test_product.py0000664000175000017500000002630712177456333025610 0ustar aricaric00000000000000import networkx as nx from networkx import tensor_product,cartesian_product,lexicographic_product,strong_product from nose.tools import assert_raises, assert_true, assert_equal, raises @raises(nx.NetworkXError) def test_tensor_product_raises(): P = tensor_product(nx.DiGraph(),nx.Graph()) def test_tensor_product_null(): null=nx.null_graph() empty10=nx.empty_graph(10) K3=nx.complete_graph(3) K10=nx.complete_graph(10) P3=nx.path_graph(3) P10=nx.path_graph(10) # null graph G=tensor_product(null,null) assert_true(nx.is_isomorphic(G,null)) # null_graph X anything = null_graph and v.v. G=tensor_product(null,empty10) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(null,K3) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(null,K10) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(null,P3) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(null,P10) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(empty10,null) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(K3,null) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(K10,null) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(P3,null) assert_true(nx.is_isomorphic(G,null)) G=tensor_product(P10,null) assert_true(nx.is_isomorphic(G,null)) def test_tensor_product_size(): P5 = nx.path_graph(5) K3 = nx.complete_graph(3) K5 = nx.complete_graph(5) G=tensor_product(P5,K3) assert_equal(nx.number_of_nodes(G),5*3) G=tensor_product(K3,K5) assert_equal(nx.number_of_nodes(G),3*5) def test_tensor_product_combinations(): # basic smoke test, more realistic tests would be usefule P5 = nx.path_graph(5) K3 = nx.complete_graph(3) G=tensor_product(P5,K3) assert_equal(nx.number_of_nodes(G),5*3) G=tensor_product(P5,nx.MultiGraph(K3)) assert_equal(nx.number_of_nodes(G),5*3) G=tensor_product(nx.MultiGraph(P5),K3) assert_equal(nx.number_of_nodes(G),5*3) G=tensor_product(nx.MultiGraph(P5),nx.MultiGraph(K3)) assert_equal(nx.number_of_nodes(G),5*3) G=tensor_product(nx.DiGraph(P5),nx.DiGraph(K3)) assert_equal(nx.number_of_nodes(G),5*3) def test_tensor_product_classic_result(): K2 = nx.complete_graph(2) G = nx.petersen_graph() G = tensor_product(G,K2) assert_true(nx.is_isomorphic(G,nx.desargues_graph())) G = nx.cycle_graph(5) G = tensor_product(G,K2) assert_true(nx.is_isomorphic(G,nx.cycle_graph(10))) G = nx.tetrahedral_graph() G = tensor_product(G,K2) assert_true(nx.is_isomorphic(G,nx.cubical_graph())) def test_tensor_product_random(): G = nx.erdos_renyi_graph(10,2/10.) H = nx.erdos_renyi_graph(10,2/10.) 
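    # verify the defining property of the tensor product on a random pair:
    # (u_G,u_H) ~ (v_G,v_H) in GH exactly when u_G ~ v_G in G and u_H ~ v_H in H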
GH = tensor_product(G,H) for (u_G,u_H) in GH.nodes_iter(): for (v_G,v_H) in GH.nodes_iter(): if H.has_edge(u_H,v_H) and G.has_edge(u_G,v_G): assert_true(GH.has_edge((u_G,u_H),(v_G,v_H))) else: assert_true(not GH.has_edge((u_G,u_H),(v_G,v_H))) def test_cartesian_product_multigraph(): G=nx.MultiGraph() G.add_edge(1,2,key=0) G.add_edge(1,2,key=1) H=nx.MultiGraph() H.add_edge(3,4,key=0) H.add_edge(3,4,key=1) GH=cartesian_product(G,H) assert_equal( set(GH) , set([(1, 3), (2, 3), (2, 4), (1, 4)])) assert_equal( set(GH.edges(keys=True)) , set([((1, 3), (2, 3), 0), ((1, 3), (2, 3), 1), ((1, 3), (1, 4), 0), ((1, 3), (1, 4), 1), ((2, 3), (2, 4), 0), ((2, 3), (2, 4), 1), ((2, 4), (1, 4), 0), ((2, 4), (1, 4), 1)])) @raises(nx.NetworkXError) def test_cartesian_product_raises(): P = cartesian_product(nx.DiGraph(),nx.Graph()) def test_cartesian_product_null(): null=nx.null_graph() empty10=nx.empty_graph(10) K3=nx.complete_graph(3) K10=nx.complete_graph(10) P3=nx.path_graph(3) P10=nx.path_graph(10) # null graph G=cartesian_product(null,null) assert_true(nx.is_isomorphic(G,null)) # null_graph X anything = null_graph and v.v. G=cartesian_product(null,empty10) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(null,K3) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(null,K10) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(null,P3) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(null,P10) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(empty10,null) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(K3,null) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(K10,null) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(P3,null) assert_true(nx.is_isomorphic(G,null)) G=cartesian_product(P10,null) assert_true(nx.is_isomorphic(G,null)) def test_cartesian_product_size(): # order(GXH)=order(G)*order(H) K5=nx.complete_graph(5) P5=nx.path_graph(5) K3=nx.complete_graph(3) G=cartesian_product(P5,K3) assert_equal(nx.number_of_nodes(G),5*3) assert_equal(nx.number_of_edges(G), nx.number_of_edges(P5)*nx.number_of_nodes(K3)+ nx.number_of_edges(K3)*nx.number_of_nodes(P5)) G=cartesian_product(K3,K5) assert_equal(nx.number_of_nodes(G),3*5) assert_equal(nx.number_of_edges(G), nx.number_of_edges(K5)*nx.number_of_nodes(K3)+ nx.number_of_edges(K3)*nx.number_of_nodes(K5)) def test_cartesian_product_classic(): # test some classic product graphs P2 = nx.path_graph(2) P3 = nx.path_graph(3) # cube = 2-path X 2-path G=cartesian_product(P2,P2) G=cartesian_product(P2,G) assert_true(nx.is_isomorphic(G,nx.cubical_graph())) # 3x3 grid G=cartesian_product(P3,P3) assert_true(nx.is_isomorphic(G,nx.grid_2d_graph(3,3))) def test_cartesian_product_random(): G = nx.erdos_renyi_graph(10,2/10.) H = nx.erdos_renyi_graph(10,2/10.) GH = cartesian_product(G,H) for (u_G,u_H) in GH.nodes_iter(): for (v_G,v_H) in GH.nodes_iter(): if (u_G==v_G and H.has_edge(u_H,v_H)) or \ (u_H==v_H and G.has_edge(u_G,v_G)): assert_true(GH.has_edge((u_G,u_H),(v_G,v_H))) else: assert_true(not GH.has_edge((u_G,u_H),(v_G,v_H))) @raises(nx.NetworkXError) def test_lexicographic_product_raises(): P=lexicographic_product(nx.DiGraph(),nx.Graph()) def test_lexicographic_product_null(): null=nx.null_graph() empty10=nx.empty_graph(10) K3=nx.complete_graph(3) K10=nx.complete_graph(10) P3=nx.path_graph(3) P10=nx.path_graph(10) # null graph G=lexicographic_product(null,null) assert_true(nx.is_isomorphic(G,null)) # null_graph X anything = null_graph and v.v. 
G=lexicographic_product(null,empty10) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(null,K3) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(null,K10) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(null,P3) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(null,P10) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(empty10,null) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(K3,null) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(K10,null) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(P3,null) assert_true(nx.is_isomorphic(G,null)) G=lexicographic_product(P10,null) assert_true(nx.is_isomorphic(G,null)) def test_lexicographic_product_size(): K5=nx.complete_graph(5) P5=nx.path_graph(5) K3=nx.complete_graph(3) G=lexicographic_product(P5,K3) assert_equal(nx.number_of_nodes(G),5*3) G=lexicographic_product(K3,K5) assert_equal(nx.number_of_nodes(G),3*5) def test_lexicographic_product_combinations(): P5=nx.path_graph(5) K3=nx.complete_graph(3) G=lexicographic_product(P5,K3) assert_equal(nx.number_of_nodes(G),5*3) G=lexicographic_product(nx.MultiGraph(P5),K3) assert_equal(nx.number_of_nodes(G),5*3) G=lexicographic_product(P5,nx.MultiGraph(K3)) assert_equal(nx.number_of_nodes(G),5*3) G=lexicographic_product(nx.MultiGraph(P5),nx.MultiGraph(K3)) assert_equal(nx.number_of_nodes(G),5*3) #No classic easily found classic results for lexicographic product def test_lexicographic_product_random(): G = nx.erdos_renyi_graph(10,2/10.) H = nx.erdos_renyi_graph(10,2/10.) GH = lexicographic_product(G,H) for (u_G,u_H) in GH.nodes_iter(): for (v_G,v_H) in GH.nodes_iter(): if G.has_edge(u_G,v_G) or (u_G==v_G and H.has_edge(u_H,v_H)): assert_true(GH.has_edge((u_G,u_H),(v_G,v_H))) else: assert_true(not GH.has_edge((u_G,u_H),(v_G,v_H))) @raises(nx.NetworkXError) def test_strong_product_raises(): P = strong_product(nx.DiGraph(),nx.Graph()) def test_strong_product_null(): null=nx.null_graph() empty10=nx.empty_graph(10) K3=nx.complete_graph(3) K10=nx.complete_graph(10) P3=nx.path_graph(3) P10=nx.path_graph(10) # null graph G=strong_product(null,null) assert_true(nx.is_isomorphic(G,null)) # null_graph X anything = null_graph and v.v. 
G=strong_product(null,empty10) assert_true(nx.is_isomorphic(G,null)) G=strong_product(null,K3) assert_true(nx.is_isomorphic(G,null)) G=strong_product(null,K10) assert_true(nx.is_isomorphic(G,null)) G=strong_product(null,P3) assert_true(nx.is_isomorphic(G,null)) G=strong_product(null,P10) assert_true(nx.is_isomorphic(G,null)) G=strong_product(empty10,null) assert_true(nx.is_isomorphic(G,null)) G=strong_product(K3,null) assert_true(nx.is_isomorphic(G,null)) G=strong_product(K10,null) assert_true(nx.is_isomorphic(G,null)) G=strong_product(P3,null) assert_true(nx.is_isomorphic(G,null)) G=strong_product(P10,null) assert_true(nx.is_isomorphic(G,null)) def test_strong_product_size(): K5=nx.complete_graph(5) P5=nx.path_graph(5) K3 = nx.complete_graph(3) G=strong_product(P5,K3) assert_equal(nx.number_of_nodes(G),5*3) G=strong_product(K3,K5) assert_equal(nx.number_of_nodes(G),3*5) def test_strong_product_combinations(): P5=nx.path_graph(5) K3 = nx.complete_graph(3) G=strong_product(P5,K3) assert_equal(nx.number_of_nodes(G),5*3) G=strong_product(nx.MultiGraph(P5),K3) assert_equal(nx.number_of_nodes(G),5*3) G=strong_product(P5,nx.MultiGraph(K3)) assert_equal(nx.number_of_nodes(G),5*3) G=strong_product(nx.MultiGraph(P5),nx.MultiGraph(K3)) assert_equal(nx.number_of_nodes(G),5*3) #No classic easily found classic results for strong product def test_strong_product_random(): G = nx.erdos_renyi_graph(10,2/10.) H = nx.erdos_renyi_graph(10,2/10.) GH = strong_product(G,H) for (u_G,u_H) in GH.nodes_iter(): for (v_G,v_H) in GH.nodes_iter(): if (u_G==v_G and H.has_edge(u_H,v_H)) or \ (u_H==v_H and G.has_edge(u_G,v_G)) or \ (G.has_edge(u_G,v_G) and H.has_edge(u_H,v_H)): assert_true(GH.has_edge((u_G,u_H),(v_G,v_H))) else: assert_true(not GH.has_edge((u_G,u_H),(v_G,v_H))) networkx-1.8.1/networkx/algorithms/operators/all.py0000664000175000017500000001016212177456333022467 0ustar aricaric00000000000000"""Operations on many graphs. """ # Copyright (C) 2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. try: from itertools import izip_longest as zip_longest except ImportError: # Python3 has zip_longest from itertools import zip_longest import networkx as nx from networkx.utils import is_string_like __author__ = """\n""".join([ 'Robert King ', 'Aric Hagberg ']) __all__ = ['union_all', 'compose_all', 'disjoint_union_all', 'intersection_all'] def union_all(graphs, rename=(None,) , name=None): """Return the union of all graphs. The graphs must be disjoint, otherwise an exception is raised. Parameters ---------- graphs : list of graphs List of NetworkX graphs rename : bool , default=(None, None) Node names of G and H can be changed by specifying the tuple rename=('G-','H-') (for example). Node "u" in G is then renamed "G-u" and "v" in H is renamed "H-v". name : string Specify the name for the union graph@not_implemnted_for('direct Returns ------- U : a graph with the same type as the first graph in list Notes ----- To force a disjoint union with node relabeling, use disjoint_union_all(G,H) or convert_node_labels_to integers(). Graph, edge, and node attributes are propagated to the union graph. If a graph attribute is present in multiple graphs, then the value from the last graph in the list with that attribute is used. 
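    A small, illustrative example:

    >>> G = nx.path_graph(2)
    >>> H = nx.path_graph(2)
    >>> U = nx.union_all([G, H], rename=('G', 'H'))
    >>> sorted(U.nodes())
    ['G0', 'G1', 'H0', 'H1']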
See Also -------- union disjoint_union_all """ graphs_names = zip_longest(graphs,rename) U, gname = next(graphs_names) for H,hname in graphs_names: U = nx.union(U, H, (gname,hname),name=name) gname = None return U def disjoint_union_all(graphs): """Return the disjoint union of all graphs. This operation forces distinct integer node labels starting with 0 for the first graph in the list and numbering consecutively. Parameters ---------- graphs : list List of NetworkX graphs Returns ------- U : A graph with the same type as the first graph in list Notes ----- It is recommended that the graphs be either all directed or all undirected. Graph, edge, and node attributes are propagated to the union graph. If a graph attribute is present in multiple graphs, then the value from the last graph in the list with that attribute is used. """ graphs = iter(graphs) U = next(graphs) for H in graphs: U = nx.disjoint_union(U, H) return U def compose_all(graphs, name=None): """Return the composition of all graphs. Composition is the simple union of the node sets and edge sets. The node sets of the supplied graphs need not be disjoint. Parameters ---------- graphs : list List of NetworkX graphs name : string Specify name for new graph Returns ------- C : A graph with the same type as the first graph in list Notes ----- It is recommended that the supplied graphs be either all directed or all undirected. Graph, edge, and node attributes are propagated to the union graph. If a graph attribute is present in multiple graphs, then the value from the last graph in the list with that attribute is used. """ graphs = iter(graphs) C = next(graphs) for H in graphs: C = nx.compose(C, H, name=name) return C def intersection_all(graphs): """Return a new graph that contains only the edges that exist in all graphs. All supplied graphs must have the same node set. Parameters ---------- graphs_list : list List of NetworkX graphs Returns ------- R : A new graph with the same type as the first graph in list Notes ----- Attributes from the graph, nodes, and edges are not copied to the new graph. """ graphs = iter(graphs) R = next(graphs) for H in graphs: R = nx.intersection(R, H) return R networkx-1.8.1/networkx/algorithms/simple_paths.py0000664000175000017500000000734512177456333022402 0ustar aricaric00000000000000# -*- coding: utf-8 -*- # Copyright (C) 2012 by # Sergio Nery Simoes # All rights reserved. # BSD license. import networkx as nx __author__ = """\n""".join(['Sérgio Nery Simões ', 'Aric Hagberg ']) __all__ = ['all_simple_paths'] def all_simple_paths(G, source, target, cutoff=None): """Generate all simple paths in the graph G from source to target. A simple path is a path with no repeated nodes. Parameters ---------- G : NetworkX graph source : node Starting node for path target : node Ending node for path cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- path_generator: generator A generator that produces lists of simple paths. If there are no paths between the source and target within the given cutoff the generator produces no output. Examples -------- >>> G = nx.complete_graph(4) >>> for path in nx.all_simple_paths(G, source=0, target=3): ... print(path) ... [0, 1, 2, 3] [0, 1, 3] [0, 2, 1, 3] [0, 2, 3] [0, 3] >>> paths = nx.all_simple_paths(G, source=0, target=3, cutoff=2) >>> print(list(paths)) [[0, 1, 3], [0, 2, 3], [0, 3]] Notes ----- This algorithm uses a modified depth-first search to generate the paths [1]_. 
A single path can be found in `O(V+E)` time but the number of simple paths in a graph can be very large, e.g. `O(n!)` in the complete graph of order n. References ---------- .. [1] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms", Addison Wesley Professional, 3rd ed., 2001. See Also -------- all_shortest_paths, shortest_path """ if source not in G: raise nx.NetworkXError('source node %s not in graph'%source) if target not in G: raise nx.NetworkXError('target node %s not in graph'%target) if cutoff is None: cutoff = len(G)-1 if G.is_multigraph(): return _all_simple_paths_multigraph(G, source, target, cutoff=cutoff) else: return _all_simple_paths_graph(G, source, target, cutoff=cutoff) def _all_simple_paths_graph(G, source, target, cutoff=None): if cutoff < 1: return visited = [source] stack = [iter(G[source])] while stack: children = stack[-1] child = next(children, None) if child is None: stack.pop() visited.pop() elif len(visited) < cutoff: if child == target: yield visited + [target] elif child not in visited: visited.append(child) stack.append(iter(G[child])) else: #len(visited) == cutoff: if child == target or target in children: yield visited + [target] stack.pop() visited.pop() def _all_simple_paths_multigraph(G, source, target, cutoff=None): if cutoff < 1: return visited = [source] stack = [(v for u,v in G.edges(source))] while stack: children = stack[-1] child = next(children, None) if child is None: stack.pop() visited.pop() elif len(visited) < cutoff: if child == target: yield visited + [target] elif child not in visited: visited.append(child) stack.append((v for u,v in G.edges(child))) else: #len(visited) == cutoff: count = ([child]+list(children)).count(target) for i in range(count): yield visited + [target] stack.pop() visited.pop() networkx-1.8.1/networkx/algorithms/clique.py0000664000175000017500000003731512177456333021174 0ustar aricaric00000000000000""" ======= Cliques ======= Find and manipulate cliques of graphs. Note that finding the largest clique of a graph has been shown to be an NP-complete problem; the algorithms here could take a long time to run. http://en.wikipedia.org/wiki/Clique_problem """ # Copyright (C) 2004-2008 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx from networkx.utils.decorators import * __author__ = """Dan Schult (dschult@colgate.edu)""" __all__ = ['find_cliques', 'find_cliques_recursive', 'make_max_clique_graph', 'make_clique_bipartite' ,'graph_clique_number', 'graph_number_of_cliques', 'node_clique_number', 'number_of_cliques', 'cliques_containing_node', 'project_down', 'project_up'] @not_implemented_for('directed') def find_cliques(G): """Search for all maximal cliques in a graph. Maximal cliques are the largest complete subgraph containing a given node. The largest maximal clique is sometimes called the maximum clique. Returns ------- generator of lists: genetor of member list for each maximal clique See Also -------- find_cliques_recursive : A recursive version of the same algorithm Notes ----- To obtain a list of cliques, use list(find_cliques(G)). Based on the algorithm published by Bron & Kerbosch (1973) [1]_ as adapated by Tomita, Tanaka and Takahashi (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. The method essentially unrolls the recursion used in the references to avoid issues of recursion stack depth. This algorithm is not suitable for directed graphs. 
This algorithm ignores self-loops and parallel edges as clique is not conventionally defined with such edges. There are often many cliques in graphs. This algorithm can run out of memory for large graphs. References ---------- .. [1] Bron, C. and Kerbosch, J. 1973. Algorithm 457: finding all cliques of an undirected graph. Commun. ACM 16, 9 (Sep. 1973), 575-577. http://portal.acm.org/citation.cfm?doid=362342.362367 .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi, The worst-case time complexity for generating all maximal cliques and computational experiments, Theoretical Computer Science, Volume 363, Issue 1, Computing and Combinatorics, 10th Annual International Conference on Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28-42 http://dx.doi.org/10.1016/j.tcs.2006.06.015 .. [3] F. Cazals, C. Karande, A note on the problem of reporting maximal cliques, Theoretical Computer Science, Volume 407, Issues 1-3, 6 November 2008, Pages 564-568, http://dx.doi.org/10.1016/j.tcs.2008.05.010 """ # Cache nbrs and find first pivot (highest degree) maxconn=-1 nnbrs={} pivotnbrs=set() # handle empty graph for n,nbrs in G.adjacency_iter(): nbrs=set(nbrs) nbrs.discard(n) conn = len(nbrs) if conn > maxconn: nnbrs[n] = pivotnbrs = nbrs maxconn = conn else: nnbrs[n] = nbrs # Initial setup cand=set(nnbrs) smallcand = set(cand - pivotnbrs) done=set() stack=[] clique_so_far=[] # Start main loop while smallcand or stack: try: # Any nodes left to check? n=smallcand.pop() except KeyError: # back out clique_so_far cand,done,smallcand = stack.pop() clique_so_far.pop() continue # Add next node to clique clique_so_far.append(n) cand.remove(n) done.add(n) nn=nnbrs[n] new_cand = cand & nn new_done = done & nn # check if we have more to search if not new_cand: if not new_done: # Found a clique! yield clique_so_far[:] clique_so_far.pop() continue # Shortcut--only one node left! if not new_done and len(new_cand)==1: yield clique_so_far + list(new_cand) clique_so_far.pop() continue # find pivot node (max connected in cand) # look in done nodes first numb_cand=len(new_cand) maxconndone=-1 for n in new_done: cn = new_cand & nnbrs[n] conn=len(cn) if conn > maxconndone: pivotdonenbrs=cn maxconndone=conn if maxconndone==numb_cand: break # Shortcut--this part of tree already searched if maxconndone == numb_cand: clique_so_far.pop() continue # still finding pivot node # look in cand nodes second maxconn=-1 for n in new_cand: cn = new_cand & nnbrs[n] conn=len(cn) if conn > maxconn: pivotnbrs=cn maxconn=conn if maxconn == numb_cand-1: break # pivot node is max connected in cand from done or cand if maxconndone > maxconn: pivotnbrs = pivotdonenbrs # save search status for later backout stack.append( (cand, done, smallcand) ) cand=new_cand done=new_done smallcand = cand - pivotnbrs def find_cliques_recursive(G): """Recursive search for all maximal cliques in a graph. Maximal cliques are the largest complete subgraph containing a given point. The largest maximal clique is sometimes called the maximum clique. Returns ------- list of lists: list of members in each maximal clique See Also -------- find_cliques : An nonrecursive version of the same algorithm Notes ----- Based on the algorithm published by Bron & Kerbosch (1973) [1]_ as adapated by Tomita, Tanaka and Takahashi (2006) [2]_ and discussed in Cazals and Karande (2008) [3]_. This implementation returns a list of lists each of which contains the members of a maximal clique. 
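Because the helper recurses once for each node added to the current clique,
very deep searches may exceed Python's recursion limit; find_cliques
provides an iterative alternative.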
This algorithm ignores self-loops and parallel edges as clique is not conventionally defined with such edges. References ---------- .. [1] Bron, C. and Kerbosch, J. 1973. Algorithm 457: finding all cliques of an undirected graph. Commun. ACM 16, 9 (Sep. 1973), 575-577. http://portal.acm.org/citation.cfm?doid=362342.362367 .. [2] Etsuji Tomita, Akira Tanaka, Haruhisa Takahashi, The worst-case time complexity for generating all maximal cliques and computational experiments, Theoretical Computer Science, Volume 363, Issue 1, Computing and Combinatorics, 10th Annual International Conference on Computing and Combinatorics (COCOON 2004), 25 October 2006, Pages 28-42 http://dx.doi.org/10.1016/j.tcs.2006.06.015 .. [3] F. Cazals, C. Karande, A note on the problem of reporting maximal cliques, Theoretical Computer Science, Volume 407, Issues 1-3, 6 November 2008, Pages 564-568, http://dx.doi.org/10.1016/j.tcs.2008.05.010 """ nnbrs={} for n,nbrs in G.adjacency_iter(): nbrs=set(nbrs) nbrs.discard(n) nnbrs[n]=nbrs if not nnbrs: return [] # empty graph cand=set(nnbrs) done=set() clique_so_far=[] cliques=[] _extend(nnbrs,cand,done,clique_so_far,cliques) return cliques def _extend(nnbrs,cand,done,so_far,cliques): # find pivot node (max connections in cand) maxconn=-1 numb_cand=len(cand) for n in done: cn = cand & nnbrs[n] conn=len(cn) if conn > maxconn: pivotnbrs=cn maxconn=conn if conn==numb_cand: # All possible cliques already found return for n in cand: cn = cand & nnbrs[n] conn=len(cn) if conn > maxconn: pivotnbrs=cn maxconn=conn # Use pivot to reduce number of nodes to examine smallercand = set(cand - pivotnbrs) for n in smallercand: cand.remove(n) so_far.append(n) nn=nnbrs[n] new_cand=cand & nn new_done=done & nn if not new_cand and not new_done: # Found the clique cliques.append(so_far[:]) elif not new_done and len(new_cand) is 1: # shortcut if only one node left cliques.append(so_far+list(new_cand)) else: _extend(nnbrs, new_cand, new_done, so_far, cliques) done.add(so_far.pop()) def make_max_clique_graph(G,create_using=None,name=None): """ Create the maximal clique graph of a graph. Finds the maximal cliques and treats these as nodes. The nodes are connected if they have common members in the original graph. Theory has done a lot with clique graphs, but I haven't seen much on maximal clique graphs. Notes ----- This should be the same as make_clique_bipartite followed by project_up, but it saves all the intermediate steps. """ cliq=list(map(set,find_cliques(G))) if create_using: B=create_using B.clear() else: B=networkx.Graph() if name is not None: B.name=name for i,cl in enumerate(cliq): B.add_node(i+1) for j,other_cl in enumerate(cliq[:i]): # if not cl.isdisjoint(other_cl): #Requires 2.6 intersect=cl & other_cl if intersect: # Not empty B.add_edge(i+1,j+1) return B def make_clique_bipartite(G,fpos=None,create_using=None,name=None): """Create a bipartite clique graph from a graph G. Nodes of G are retained as the "bottom nodes" of B and cliques of G become "top nodes" of B. Edges are present if a bottom node belongs to the clique represented by the top node. Returns a Graph with additional attribute dict B.node_type which is keyed by nodes to "Bottom" or "Top" appropriately. if fpos is not None, a second additional attribute dict B.pos is created to hold the position tuple of each node for viewing the bipartite graph. 
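For example, a triangle has a single maximal clique, which becomes top
node -1 (top nodes are numbered -1, -2, ...):

>>> G = nx.complete_graph(3)
>>> B = nx.make_clique_bipartite(G)
>>> sorted(B.nodes())
[-1, 0, 1, 2]
>>> B.node_type[-1]
'Top'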
""" cliq=list(find_cliques(G)) if create_using: B=create_using B.clear() else: B=networkx.Graph() if name is not None: B.name=name B.add_nodes_from(G) B.node_type={} # New Attribute for B for n in B: B.node_type[n]="Bottom" if fpos: B.pos={} # New Attribute for B delta_cpos=1./len(cliq) delta_ppos=1./G.order() cpos=0. ppos=0. for i,cl in enumerate(cliq): name= -i-1 # Top nodes get negative names B.add_node(name) B.node_type[name]="Top" if fpos: if name not in B.pos: B.pos[name]=(0.2,cpos) cpos +=delta_cpos for v in cl: B.add_edge(name,v) if fpos is not None: if v not in B.pos: B.pos[v]=(0.8,ppos) ppos +=delta_ppos return B def project_down(B,create_using=None,name=None): """Project a bipartite graph B down onto its "bottom nodes". The nodes retain their names and are connected if they share a common top node in the bipartite graph. Returns a Graph. """ if create_using: G=create_using G.clear() else: G=networkx.Graph() if name is not None: G.name=name for v,Bvnbrs in B.adjacency_iter(): if B.node_type[v]=="Bottom": G.add_node(v) for cv in Bvnbrs: G.add_edges_from([(v,u) for u in B[cv] if u!=v]) return G def project_up(B,create_using=None,name=None): """Project a bipartite graph B down onto its "bottom nodes". The nodes retain their names and are connected if they share a common Bottom Node in the Bipartite Graph. Returns a Graph. """ if create_using: G=create_using G.clear() else: G=networkx.Graph() if name is not None: G.name=name for v,Bvnbrs in B.adjacency_iter(): if B.node_type[v]=="Top": vname= -v #Change sign of name for Top Nodes G.add_node(vname) for cv in Bvnbrs: # Note: -u changes the name (not Top node anymore) G.add_edges_from([(vname,-u) for u in B[cv] if u!=v]) return G def graph_clique_number(G,cliques=None): """Return the clique number (size of the largest clique) for G. An optional list of cliques can be input if already computed. """ if cliques is None: cliques=find_cliques(G) return max( [len(c) for c in cliques] ) def graph_number_of_cliques(G,cliques=None): """Returns the number of maximal cliques in G. An optional list of cliques can be input if already computed. """ if cliques is None: cliques=list(find_cliques(G)) return len(cliques) def node_clique_number(G,nodes=None,cliques=None): """ Returns the size of the largest maximal clique containing each given node. Returns a single or list depending on input nodes. Optional list of cliques can be input if already computed. """ if cliques is None: if nodes is not None: # Use ego_graph to decrease size of graph if isinstance(nodes,list): d={} for n in nodes: H=networkx.ego_graph(G,n) d[n]=max( (len(c) for c in find_cliques(H)) ) else: H=networkx.ego_graph(G,nodes) d=max( (len(c) for c in find_cliques(H)) ) return d # nodes is None--find all cliques cliques=list(find_cliques(G)) if nodes is None: nodes=G.nodes() # none, get entire graph if not isinstance(nodes, list): # check for a list v=nodes # assume it is a single value d=max([len(c) for c in cliques if v in c]) else: d={} for v in nodes: d[v]=max([len(c) for c in cliques if v in c]) return d # if nodes is None: # none, use entire graph # nodes=G.nodes() # elif not isinstance(nodes, list): # check for a list # nodes=[nodes] # assume it is a single value # if cliques is None: # cliques=list(find_cliques(G)) # d={} # for v in nodes: # d[v]=max([len(c) for c in cliques if v in c]) # if nodes in G: # return d[v] #return single value # return d def number_of_cliques(G,nodes=None,cliques=None): """Returns the number of maximal cliques for each node. 
Returns a single or list depending on input nodes. Optional list of cliques can be input if already computed. """ if cliques is None: cliques=list(find_cliques(G)) if nodes is None: nodes=G.nodes() # none, get entire graph if not isinstance(nodes, list): # check for a list v=nodes # assume it is a single value numcliq=len([1 for c in cliques if v in c]) else: numcliq={} for v in nodes: numcliq[v]=len([1 for c in cliques if v in c]) return numcliq def cliques_containing_node(G,nodes=None,cliques=None): """Returns a list of cliques containing the given node. Returns a single list or list of lists depending on input nodes. Optional list of cliques can be input if already computed. """ if cliques is None: cliques=list(find_cliques(G)) if nodes is None: nodes=G.nodes() # none, get entire graph if not isinstance(nodes, list): # check for a list v=nodes # assume it is a single value vcliques=[c for c in cliques if v in c] else: vcliques={} for v in nodes: vcliques[v]=[c for c in cliques if v in c] return vcliques networkx-1.8.1/networkx/algorithms/swap.py0000664000175000017500000001362312177456333020660 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Swap edges in a graph. """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import math import random import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult (dschult@colgate.edu)' 'Joel Miller (joel.c.miller.research@gmail.com)' 'Ben Edwards']) __all__ = ['double_edge_swap', 'connected_double_edge_swap'] def double_edge_swap(G, nswap=1, max_tries=100): """Swap two edges in the graph while keeping the node degrees fixed. A double-edge swap removes two randomly chosen edges u-v and x-y and creates the new edges u-x and v-y:: u--v u v becomes | | x--y x y If either the edge u-x or v-y already exist no swap is performed and another attempt is made to find a suitable edge pair. Parameters ---------- G : graph An undirected graph nswap : integer (optional, default=1) Number of double-edge swaps to perform max_tries : integer (optional) Maximum number of attempts to swap edges Returns ------- G : graph The graph after double edge swaps. Notes ----- Does not enforce any connectivity constraints. The graph G is modified in place. """ if G.is_directed(): raise nx.NetworkXError(\ "double_edge_swap() not defined for directed graphs.") if nswap>max_tries: raise nx.NetworkXError("Number of swaps > number of tries allowed.") if len(G) < 4: raise nx.NetworkXError("Graph has less than four nodes.") # Instead of choosing uniformly at random from a generated edge list, # this algorithm chooses nonuniformly from the set of nodes with # probability weighted by degree. n=0 swapcount=0 keys,degrees=zip(*G.degree().items()) # keys, degree cdf=nx.utils.cumulative_distribution(degrees) # cdf of degree while swapcount < nswap: # if random.random() < 0.5: continue # trick to avoid periodicities? 
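        # Sampling a node with probability proportional to its degree and then
        # a uniform random neighbor of that node is equivalent to sampling an
        # edge uniformly at random, so no explicit edge list is needed.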
# pick two random edges without creating edge list # choose source node indices from discrete distribution (ui,xi)=nx.utils.discrete_sequence(2,cdistribution=cdf) if ui==xi: continue # same source, skip u=keys[ui] # convert index to label x=keys[xi] # choose target uniformly from neighbors v=random.choice(list(G[u])) y=random.choice(list(G[x])) if v==y: continue # same target, skip if (x not in G[u]) and (y not in G[v]): # don't create parallel edges G.add_edge(u,x) G.add_edge(v,y) G.remove_edge(u,v) G.remove_edge(x,y) swapcount+=1 if n >= max_tries: e=('Maximum number of swap attempts (%s) exceeded '%n + 'before desired swaps achieved (%s).'%nswap) raise nx.NetworkXAlgorithmError(e) n+=1 return G def connected_double_edge_swap(G, nswap=1): """Attempt nswap double-edge swaps in the graph G. A double-edge swap removes two randomly chosen edges u-v and x-y and creates the new edges u-x and v-y:: u--v u v becomes | | x--y x y If either the edge u-x or v-y already exist no swap is performed so the actual count of swapped edges is always <= nswap Parameters ---------- G : graph An undirected graph nswap : integer (optional, default=1) Number of double-edge swaps to perform Returns ------- G : int The number of successful swaps Notes ----- The initial graph G must be connected, and the resulting graph is connected. The graph G is modified in place. References ---------- .. [1] C. Gkantsidis and M. Mihail and E. Zegura, The Markov chain simulation method for generating connected power law random graphs, 2003. http://citeseer.ist.psu.edu/gkantsidis03markov.html """ import math if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected") if len(G) < 4: raise nx.NetworkXError("Graph has less than four nodes.") n=0 swapcount=0 deg=G.degree() dk=list(deg.keys()) # Label key for nodes cdf=nx.utils.cumulative_distribution(list(G.degree().values())) window=1 while n < nswap: wcount=0 swapped=[] while wcount < window and n < nswap: # Pick two random edges without creating edge list # Choose source nodes from discrete degree distribution (ui,xi)=nx.utils.discrete_sequence(2,cdistribution=cdf) if ui==xi: continue # same source, skip u=dk[ui] # convert index to label x=dk[xi] # Choose targets uniformly from neighbors v=random.choice(G.neighbors(u)) y=random.choice(G.neighbors(x)) # if v==y: continue # same target, skip if (not G.has_edge(u,x)) and (not G.has_edge(v,y)): G.remove_edge(u,v) G.remove_edge(x,y) G.add_edge(u,x) G.add_edge(v,y) swapped.append((u,v,x,y)) swapcount+=1 n+=1 wcount+=1 if nx.is_connected(G): window+=1 else: # not connected, undo changes from previous window, decrease window while swapped: (u,v,x,y)=swapped.pop() G.add_edge(u,v) G.add_edge(x,y) G.remove_edge(u,x) G.remove_edge(v,y) swapcount-=1 window = int(math.ceil(float(window)/2)) return swapcount networkx-1.8.1/networkx/algorithms/shortest_paths/0000775000175000017500000000000012177457361022403 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/shortest_paths/unweighted.py0000664000175000017500000002244512177456333025125 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Shortest path algorithms for unweighted graphs. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
__all__ = ['bidirectional_shortest_path', 'single_source_shortest_path', 'single_source_shortest_path_length', 'all_pairs_shortest_path', 'all_pairs_shortest_path_length', 'predecessor'] import networkx as nx def single_source_shortest_path_length(G,source,cutoff=None): """Compute the shortest path lengths from source to all reachable nodes. Parameters ---------- G : NetworkX graph source : node Starting node for path cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- lengths : dictionary Dictionary of shortest path lengths keyed by target. Examples -------- >>> G=nx.path_graph(5) >>> length=nx.single_source_shortest_path_length(G,0) >>> length[4] 4 >>> print(length) {0: 0, 1: 1, 2: 2, 3: 3, 4: 4} See Also -------- shortest_path_length """ seen={} # level (number of hops) when seen in BFS level=0 # the current level nextlevel={source:1} # dict of nodes to check at next level while nextlevel: thislevel=nextlevel # advance to next level nextlevel={} # and start a new list (fringe) for v in thislevel: if v not in seen: seen[v]=level # set the level of vertex v nextlevel.update(G[v]) # add neighbors of v if (cutoff is not None and cutoff <= level): break level=level+1 return seen # return all path lengths as dictionary def all_pairs_shortest_path_length(G,cutoff=None): """ Compute the shortest path lengths between all nodes in G. Parameters ---------- G : NetworkX graph cutoff : integer, optional depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- lengths : dictionary Dictionary of shortest path lengths keyed by source and target. Notes ----- The dictionary returned only has keys for reachable node pairs. Examples -------- >>> G=nx.path_graph(5) >>> length=nx.all_pairs_shortest_path_length(G) >>> print(length[1][4]) 3 >>> length[1] {0: 1, 1: 0, 2: 1, 3: 2, 4: 3} """ paths={} for n in G: paths[n]=single_source_shortest_path_length(G,n,cutoff=cutoff) return paths def bidirectional_shortest_path(G,source,target): """Return a list of nodes in a shortest path between source and target. Parameters ---------- G : NetworkX graph source : node label starting node for path target : node label ending node for path Returns ------- path: list List of nodes in a path from source to target. Raises ------ NetworkXNoPath If no path exists between source and target. See Also -------- shortest_path Notes ----- This algorithm is used by shortest_path(G,source,target). """ # call helper to do the real work results=_bidirectional_pred_succ(G,source,target) pred,succ,w=results # build path from pred+w+succ path=[] # from w to target while w is not None: path.append(w) w=succ[w] # from source to w w=pred[path[0]] while w is not None: path.insert(0,w) w=pred[w] return path def _bidirectional_pred_succ(G, source, target): """Bidirectional shortest path helper. Returns (pred,succ,w) where pred is a dictionary of predecessors from w to the source, and succ is a dictionary of successors from w to the target. 
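The meeting node w lies on a shortest path from source to target.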
""" # does BFS from both source and target and meets in the middle if target == source: return ({target:None},{source:None},source) # handle either directed or undirected if G.is_directed(): Gpred=G.predecessors_iter Gsucc=G.successors_iter else: Gpred=G.neighbors_iter Gsucc=G.neighbors_iter # predecesssor and successors in search pred={source:None} succ={target:None} # initialize fringes, start with forward forward_fringe=[source] reverse_fringe=[target] while forward_fringe and reverse_fringe: if len(forward_fringe) <= len(reverse_fringe): this_level=forward_fringe forward_fringe=[] for v in this_level: for w in Gsucc(v): if w not in pred: forward_fringe.append(w) pred[w]=v if w in succ: return pred,succ,w # found path else: this_level=reverse_fringe reverse_fringe=[] for v in this_level: for w in Gpred(v): if w not in succ: succ[w]=v reverse_fringe.append(w) if w in pred: return pred,succ,w # found path raise nx.NetworkXNoPath("No path between %s and %s." % (source, target)) def single_source_shortest_path(G,source,cutoff=None): """Compute shortest path between source and all other nodes reachable from source. Parameters ---------- G : NetworkX graph source : node label Starting node for path cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- lengths : dictionary Dictionary, keyed by target, of shortest paths. Examples -------- >>> G=nx.path_graph(5) >>> path=nx.single_source_shortest_path(G,0) >>> path[4] [0, 1, 2, 3, 4] Notes ----- The shortest path is not necessarily unique. So there can be multiple paths between the source and each target node, all of which have the same 'shortest' length. For each target node, this function returns only one of those paths. See Also -------- shortest_path """ level=0 # the current level nextlevel={source:1} # list of nodes to check at next level paths={source:[source]} # paths dictionary (paths to key from source) if cutoff==0: return paths while nextlevel: thislevel=nextlevel nextlevel={} for v in thislevel: for w in G[v]: if w not in paths: paths[w]=paths[v]+[w] nextlevel[w]=1 level=level+1 if (cutoff is not None and cutoff <= level): break return paths def all_pairs_shortest_path(G,cutoff=None): """ Compute shortest paths between all nodes. Parameters ---------- G : NetworkX graph cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- lengths : dictionary Dictionary, keyed by source and target, of shortest paths. Examples -------- >>> G=nx.path_graph(5) >>> path=nx.all_pairs_shortest_path(G) >>> print(path[0][4]) [0, 1, 2, 3, 4] See Also -------- floyd_warshall() """ paths={} for n in G: paths[n]=single_source_shortest_path(G,n,cutoff=cutoff) return paths def predecessor(G,source,target=None,cutoff=None,return_seen=None): """ Returns dictionary of predecessors for the path from source to all nodes in G. Parameters ---------- G : NetworkX graph source : node label Starting node for path target : node label, optional Ending node for path. If provided only predecessors between source and target are returned cutoff : integer, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- pred : dictionary Dictionary, keyed by node, of predecessors in the shortest path. 
Examples -------- >>> G=nx.path_graph(4) >>> print(G.nodes()) [0, 1, 2, 3] >>> nx.predecessor(G,0) {0: [], 1: [0], 2: [1], 3: [2]} """ level=0 # the current level nextlevel=[source] # list of nodes to check at next level seen={source:level} # level (number of hops) when seen in BFS pred={source:[]} # predecessor dictionary while nextlevel: level=level+1 thislevel=nextlevel nextlevel=[] for v in thislevel: for w in G[v]: if w not in seen: pred[w]=[v] seen[w]=level nextlevel.append(w) elif (seen[w]==level):# add v to predecessor list if it pred[w].append(v) # is at the correct level if (cutoff and cutoff <= level): break if target is not None: if return_seen: if not target in pred: return ([],-1) # No predecessor return (pred[target],seen[target]) else: if not target in pred: return [] # No predecessor return pred[target] else: if return_seen: return (pred,seen) else: return pred networkx-1.8.1/networkx/algorithms/shortest_paths/astar.py0000664000175000017500000001147112177456333024071 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Shortest paths and path lengths using A* ("A star") algorithm. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from heapq import heappush, heappop from networkx import NetworkXError import networkx as nx __author__ = "\n".join(["Salim Fadhley ", "Matteo Dell'Amico "]) __all__ = ['astar_path', 'astar_path_length'] def astar_path(G, source, target, heuristic=None, weight='weight'): """Return a list of nodes in a shortest path between source and target using the A* ("A-star") algorithm. There may be more than one shortest path. This returns only one. Parameters ---------- G : NetworkX graph source : node Starting node for path target : node Ending node for path heuristic : function A function to evaluate the estimate of the distance from the a node to the target. The function takes two nodes arguments and must return a number. weight: string, optional (default='weight') Edge data key corresponding to the edge weight. Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.astar_path(G,0,4)) [0, 1, 2, 3, 4] >>> G=nx.grid_graph(dim=[3,3]) # nodes are two-tuples (x,y) >>> def dist(a, b): ... (x1, y1) = a ... (x2, y2) = b ... return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5 >>> print(nx.astar_path(G,(0,0),(2,2),dist)) [(0, 0), (0, 1), (1, 1), (1, 2), (2, 2)] See Also -------- shortest_path, dijkstra_path """ if G.is_multigraph(): raise NetworkXError("astar_path() not implemented for Multi(Di)Graphs") if heuristic is None: # The default heuristic is h=0 - same as Dijkstra's algorithm def heuristic(u, v): return 0 # The queue stores priority, node, cost to reach, and parent. # Uses Python heapq to keep in priority order. # Add each node's hash to the queue to prevent the underlying heap from # attempting to compare the nodes themselves. The hash breaks ties in the # priority and is guarenteed unique for all nodes in the graph. queue = [(0, hash(source), source, 0, None)] # Maps enqueued nodes to distance of discovered paths and the # computed heuristics to target. We avoid computing the heuristics # more than once and inserting the node into the queue too many times. enqueued = {} # Maps explored nodes to parent closest to the source. explored = {} while queue: # Pop the smallest item from queue. 
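        # Queue entries are ordered by f = g + h.  Each node is expanded at
        # most once (see `explored` below), so the first time the target is
        # popped its recorded distance is optimal, provided the heuristic is
        # consistent.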
_, __, curnode, dist, parent = heappop(queue) if curnode == target: path = [curnode] node = parent while node is not None: path.append(node) node = explored[node] path.reverse() return path if curnode in explored: continue explored[curnode] = parent for neighbor, w in G[curnode].items(): if neighbor in explored: continue ncost = dist + w.get(weight, 1) if neighbor in enqueued: qcost, h = enqueued[neighbor] # if qcost < ncost, a longer path to neighbor remains # enqueued. Removing it would need to filter the whole # queue, it's better just to leave it there and ignore # it when we visit the node a second time. if qcost <= ncost: continue else: h = heuristic(neighbor, target) enqueued[neighbor] = ncost, h heappush(queue, (ncost + h, hash(neighbor), neighbor, ncost, curnode)) raise nx.NetworkXNoPath("Node %s not reachable from %s" % (source, target)) def astar_path_length(G, source, target, heuristic=None, weight='weight'): """Return the length of the shortest path between source and target using the A* ("A-star") algorithm. Parameters ---------- G : NetworkX graph source : node Starting node for path target : node Ending node for path heuristic : function A function to evaluate the estimate of the distance from the a node to the target. The function takes two nodes arguments and must return a number. Raises ------ NetworkXNoPath If no path exists between source and target. See Also -------- astar_path """ path = astar_path(G, source, target, heuristic, weight) return sum(G[u][v].get(weight, 1) for u, v in zip(path[:-1], path[1:])) networkx-1.8.1/networkx/algorithms/shortest_paths/dense.py0000664000175000017500000001162312177456333024054 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Floyd-Warshall algorithm for shortest paths. """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = """Aric Hagberg """ __all__ = ['floyd_warshall', 'floyd_warshall_predecessor_and_distance', 'floyd_warshall_numpy'] def floyd_warshall_numpy(G, nodelist=None, weight='weight'): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters ---------- G : NetworkX graph nodelist : list, optional The rows and columns are ordered by the nodes in nodelist. If nodelist is None then the ordering is produced by G.nodes(). weight: string, optional (default= 'weight') Edge data key corresponding to the edge weight. Returns ------- distance : NumPy matrix A matrix of shortest path distances between nodes. If there is no path between to nodes the corresponding matrix entry will be Inf. Notes ------ Floyd's algorithm is appropriate for finding shortest paths in dense graphs or graphs with negative weights when Dijkstra's algorithm fails. This algorithm can still fail if there are negative cycles. It has running time O(n^3) with running space of O(n^2). """ try: import numpy as np except ImportError: raise ImportError(\ "to_numpy_matrix() requires numpy: http://scipy.org/ ") A = nx.to_numpy_matrix(G, nodelist=nodelist, multigraph_weight=min, weight=weight) n,m = A.shape I = np.identity(n) A[A==0] = np.inf # set zero entries to inf A[I==1] = 0 # except diagonal which should be zero for i in range(n): A = np.minimum(A, A[i,:] + A[:,i]) return A def floyd_warshall_predecessor_and_distance(G, weight='weight'): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters ---------- G : NetworkX graph weight: string, optional (default= 'weight') Edge data key corresponding to the edge weight. 
Returns ------- predecessor,distance : dictionaries Dictionaries, keyed by source and target, of predecessors and distances in the shortest path. Notes ------ Floyd's algorithm is appropriate for finding shortest paths in dense graphs or graphs with negative weights when Dijkstra's algorithm fails. This algorithm can still fail if there are negative cycles. It has running time O(n^3) with running space of O(n^2). See Also -------- floyd_warshall floyd_warshall_numpy all_pairs_shortest_path all_pairs_shortest_path_length """ from collections import defaultdict # dictionary-of-dictionaries representation for dist and pred # use some defaultdict magick here # for dist the default is the floating point inf value dist = defaultdict(lambda : defaultdict(lambda: float('inf'))) for u in G: dist[u][u] = 0 pred = defaultdict(dict) # initialize path distance dictionary to be the adjacency matrix # also set the distance to self to 0 (zero diagonal) undirected = not G.is_directed() for u,v,d in G.edges(data=True): e_weight = d.get(weight, 1.0) dist[u][v] = min(e_weight, dist[u][v]) pred[u][v] = u if undirected: dist[v][u] = min(e_weight, dist[v][u]) pred[v][u] = v for w in G: for u in G: for v in G: if dist[u][v] > dist[u][w] + dist[w][v]: dist[u][v] = dist[u][w] + dist[w][v] pred[u][v] = pred[w][v] return dict(pred),dict(dist) def floyd_warshall(G, weight='weight'): """Find all-pairs shortest path lengths using Floyd's algorithm. Parameters ---------- G : NetworkX graph weight: string, optional (default= 'weight') Edge data key corresponding to the edge weight. Returns ------- distance : dict A dictionary, keyed by source and target, of shortest paths distances between nodes. Notes ------ Floyd's algorithm is appropriate for finding shortest paths in dense graphs or graphs with negative weights when Dijkstra's algorithm fails. This algorithm can still fail if there are negative cycles. It has running time O(n^3) with running space of O(n^2). See Also -------- floyd_warshall_predecessor_and_distance floyd_warshall_numpy all_pairs_shortest_path all_pairs_shortest_path_length """ # could make this its own function to reduce memory costs return floyd_warshall_predecessor_and_distance(G, weight=weight)[1] # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/algorithms/shortest_paths/generic.py0000664000175000017500000002764412177456333024404 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Compute the shortest paths and path lengths between nodes in the graph. These algorithms work with undirected and directed graphs. For directed graphs the paths can be computed in the reverse order by first flipping the edge orientation using R=G.reverse(copy=False). """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = """\n""".join(['Aric Hagberg ', 'Sérgio Nery Simões ']) __all__ = ['shortest_path', 'all_shortest_paths', 'shortest_path_length', 'average_shortest_path_length', 'has_path'] def has_path(G, source, target): """Return True if G has a path from source to target, False otherwise. Parameters ---------- G : NetworkX graph source : node Starting node for path target : node Ending node for path """ try: sp = nx.shortest_path(G,source, target) except nx.NetworkXNoPath: return False return True def shortest_path(G, source=None, target=None, weight=None): """Compute shortest paths in the graph. 
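When weight is None the paths are computed by breadth-first search;
otherwise Dijkstra's algorithm is used with the named edge attribute as
the edge weight.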
Parameters ---------- G : NetworkX graph source : node, optional Starting node for path. If not specified, compute shortest paths using all nodes as source nodes. target : node, optional Ending node for path. If not specified, compute shortest paths using all nodes as target nodes. weight : None or string, optional (default = None) If None, every edge has weight/distance/cost 1. If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. Returns ------- path: list or dictionary All returned paths include both the source and target in the path. If the source and target are both specified, return a single list of nodes in a shortest path from the source to the target. If only the source is specified, return a dictionary keyed by targets with a list of nodes in a shortest path from the source to one of the targets. If only the target is specified, return a dictionary keyed by sources with a list of nodes in a shortest path from one of the sources to the target. If neither the source nor target are specified return a dictionary of dictionaries with path[source][target]=[list of nodes in path]. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.shortest_path(G,source=0,target=4)) [0, 1, 2, 3, 4] >>> p=nx.shortest_path(G,source=0) # target not specified >>> p[4] [0, 1, 2, 3, 4] >>> p=nx.shortest_path(G,target=4) # source not specified >>> p[0] [0, 1, 2, 3, 4] >>> p=nx.shortest_path(G) # source,target not specified >>> p[0][4] [0, 1, 2, 3, 4] Notes ----- There may be more than one shortest path between a source and target. This returns only one of them. For digraphs this returns a shortest directed path. To find paths in the reverse direction first use G.reverse(copy=False) to flip the edge orientation. See Also -------- all_pairs_shortest_path() all_pairs_dijkstra_path() single_source_shortest_path() single_source_dijkstra_path() """ if source is None: if target is None: ## Find paths between all pairs. if weight is None: paths=nx.all_pairs_shortest_path(G) else: paths=nx.all_pairs_dijkstra_path(G,weight=weight) else: ## Find paths from all nodes co-accessible to the target. directed = G.is_directed() if directed: G.reverse(copy=False) if weight is None: paths=nx.single_source_shortest_path(G,target) else: paths=nx.single_source_dijkstra_path(G,target,weight=weight) # Now flip the paths so they go from a source to the target. for target in paths: paths[target] = list(reversed(paths[target])) if directed: G.reverse(copy=False) else: if target is None: ## Find paths to all nodes accessible from the source. if weight is None: paths=nx.single_source_shortest_path(G,source) else: paths=nx.single_source_dijkstra_path(G,source,weight=weight) else: ## Find shortest source-target path. if weight is None: paths=nx.bidirectional_shortest_path(G,source,target) else: paths=nx.dijkstra_path(G,source,target,weight) return paths def shortest_path_length(G, source=None, target=None, weight=None): """Compute shortest path lengths in the graph. Parameters ---------- G : NetworkX graph source : node, optional Starting node for path. If not specified, compute shortest path lengths using all nodes as source nodes. target : node, optional Ending node for path. If not specified, compute shortest path lengths using all nodes as target nodes. weight : None or string, optional (default = None) If None, every edge has weight/distance/cost 1. If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. 
Returns ------- length: int or dictionary If the source and target are both specified, return the length of the shortest path from the source to the target. If only the source is specified, return a dictionary keyed by targets whose values are the lengths of the shortest path from the source to one of the targets. If only the target is specified, return a dictionary keyed by sources whose values are the lengths of the shortest path from one of the sources to the target. If neither the source nor target are specified return a dictionary of dictionaries with path[source][target]=L, where L is the length of the shortest path from source to target. Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.shortest_path_length(G,source=0,target=4)) 4 >>> p=nx.shortest_path_length(G,source=0) # target not specified >>> p[4] 4 >>> p=nx.shortest_path_length(G,target=4) # source not specified >>> p[0] 4 >>> p=nx.shortest_path_length(G) # source,target not specified >>> p[0][4] 4 Notes ----- The length of the path is always 1 less than the number of nodes involved in the path since the length measures the number of edges followed. For digraphs this returns the shortest directed path length. To find path lengths in the reverse direction use G.reverse(copy=False) first to flip the edge orientation. See Also -------- all_pairs_shortest_path_length() all_pairs_dijkstra_path_length() single_source_shortest_path_length() single_source_dijkstra_path_length() """ if source is None: if target is None: ## Find paths between all pairs. if weight is None: paths=nx.all_pairs_shortest_path_length(G) else: paths=nx.all_pairs_dijkstra_path_length(G, weight=weight) else: ## Find paths from all nodes co-accessible to the target. directed = G.is_directed() if directed: G.reverse(copy=False) if weight is None: paths=nx.single_source_shortest_path_length(G,target) else: paths=nx.single_source_dijkstra_path_length(G,target, weight=weight) if directed: G.reverse(copy=False) else: if target is None: ## Find paths to all nodes accessible from the source. if weight is None: paths=nx.single_source_shortest_path_length(G,source) else: paths=nx.single_source_dijkstra_path_length(G,source,weight=weight) else: ## Find shortest source-target path. if weight is None: p=nx.bidirectional_shortest_path(G,source,target) paths=len(p)-1 else: paths=nx.dijkstra_path_length(G,source,target,weight) return paths def average_shortest_path_length(G, weight=None): r"""Return the average shortest path length. The average shortest path length is .. math:: a =\sum_{s,t \in V} \frac{d(s, t)}{n(n-1)} where `V` is the set of nodes in `G`, `d(s, t)` is the shortest path from `s` to `t`, and `n` is the number of nodes in `G`. Parameters ---------- G : NetworkX graph weight : None or string, optional (default = None) If None, every edge has weight/distance/cost 1. If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. Raises ------ NetworkXError: if the graph is not connected. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.average_shortest_path_length(G)) 2.0 For disconnected graphs you can compute the average shortest path length for each component: >>> G=nx.Graph([(1,2),(3,4)]) >>> for g in nx.connected_component_subgraphs(G): ... 
print(nx.average_shortest_path_length(g)) 1.0 1.0 """ if G.is_directed(): if not nx.is_weakly_connected(G): raise nx.NetworkXError("Graph is not connected.") else: if not nx.is_connected(G): raise nx.NetworkXError("Graph is not connected.") avg=0.0 if weight is None: for node in G: path_length=nx.single_source_shortest_path_length(G, node) avg += sum(path_length.values()) else: for node in G: path_length=nx.single_source_dijkstra_path_length(G, node, weight=weight) avg += sum(path_length.values()) n=len(G) return avg/(n*(n-1)) def all_shortest_paths(G, source, target, weight=None): """Compute all shortest paths in the graph. Parameters ---------- G : NetworkX graph source : node Starting node for path. target : node Ending node for path. weight : None or string, optional (default = None) If None, every edge has weight/distance/cost 1. If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. Returns ------- paths: generator of lists A generator of all paths between source and target. Examples -------- >>> G=nx.Graph() >>> G.add_path([0,1,2]) >>> G.add_path([0,10,2]) >>> print([p for p in nx.all_shortest_paths(G,source=0,target=2)]) [[0, 1, 2], [0, 10, 2]] Notes ----- There may be many shortest paths between the source and target. See Also -------- shortest_path() single_source_shortest_path() all_pairs_shortest_path() """ if weight is not None: pred,dist = nx.dijkstra_predecessor_and_distance(G,source,weight=weight) else: pred = nx.predecessor(G,source) if target not in pred: raise nx.NetworkXNoPath() stack = [[target,0]] top = 0 while top >= 0: node,i = stack[top] if node == source: yield [p for p,n in reversed(stack[:top+1])] if len(pred[node]) > i: top += 1 if top == len(stack): stack.append([pred[node][i],0]) else: stack[top] = [pred[node][i],0] else: stack[top-1][1] += 1 top -= 1 networkx-1.8.1/networkx/algorithms/shortest_paths/__init__.py0000664000175000017500000000043612177456333024515 0ustar aricaric00000000000000from networkx.algorithms.shortest_paths.generic import * from networkx.algorithms.shortest_paths.unweighted import * from networkx.algorithms.shortest_paths.weighted import * from networkx.algorithms.shortest_paths.astar import * from networkx.algorithms.shortest_paths.dense import * networkx-1.8.1/networkx/algorithms/shortest_paths/tests/0000775000175000017500000000000012177457361023545 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/shortest_paths/tests/test_unweighted.py0000664000175000017500000000623512177456333027325 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestUnweightedPath: def setUp(self): from networkx import convert_node_labels_to_integers as cnlti self.grid=cnlti(nx.grid_2d_graph(4,4),first_label=1,ordering="sorted") self.cycle=nx.cycle_graph(7) self.directed_cycle=nx.cycle_graph(7,create_using=nx.DiGraph()) def test_bidirectional_shortest_path(self): assert_equal(nx.bidirectional_shortest_path(self.cycle,0,3), [0, 1, 2, 3]) assert_equal(nx.bidirectional_shortest_path(self.cycle,0,4), [0, 6, 5, 4]) assert_equal(nx.bidirectional_shortest_path(self.grid,1,12), [1, 2, 3, 4, 8, 12]) assert_equal(nx.bidirectional_shortest_path(self.directed_cycle,0,3), [0, 1, 2, 3]) def test_shortest_path_length(self): assert_equal(nx.shortest_path_length(self.cycle,0,3),3) assert_equal(nx.shortest_path_length(self.grid,1,12),5) assert_equal(nx.shortest_path_length(self.directed_cycle,0,4),4) # now with weights 
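        # weight=True is not the name of an edge attribute, so every edge falls
        # back to the default weight of 1; the weighted code path is exercised
        # but the lengths match the unweighted results above.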
assert_equal(nx.shortest_path_length(self.cycle,0,3,weight=True),3) assert_equal(nx.shortest_path_length(self.grid,1,12,weight=True),5) assert_equal(nx.shortest_path_length(self.directed_cycle,0,4,weight=True),4) def test_single_source_shortest_path(self): p=nx.single_source_shortest_path(self.cycle,0) assert_equal(p[3],[0,1,2,3]) p=nx.single_source_shortest_path(self.cycle,0, cutoff=0) assert_equal(p,{0 : [0]}) def test_single_source_shortest_path_length(self): assert_equal(nx.single_source_shortest_path_length(self.cycle,0), {0:0,1:1,2:2,3:3,4:3,5:2,6:1}) def test_all_pairs_shortest_path(self): p=nx.all_pairs_shortest_path(self.cycle) assert_equal(p[0][3],[0,1,2,3]) p=nx.all_pairs_shortest_path(self.grid) assert_equal(p[1][12],[1, 2, 3, 4, 8, 12]) def test_all_pairs_shortest_path_length(self): l=nx.all_pairs_shortest_path_length(self.cycle) assert_equal(l[0],{0:0,1:1,2:2,3:3,4:3,5:2,6:1}) l=nx.all_pairs_shortest_path_length(self.grid) assert_equal(l[1][16],6) def test_predecessor(self): G=nx.path_graph(4) assert_equal(nx.predecessor(G,0),{0: [], 1: [0], 2: [1], 3: [2]}) assert_equal(nx.predecessor(G,0,3),[2]) G=nx.grid_2d_graph(2,2) assert_equal(sorted(nx.predecessor(G,(0,0)).items()), [((0, 0), []), ((0, 1), [(0, 0)]), ((1, 0), [(0, 0)]), ((1, 1), [(0, 1), (1, 0)])]) def test_predecessor_cutoff(self): G=nx.path_graph(4) p = nx.predecessor(G,0,3) assert_false(4 in p) def test_predecessor_target(self): G=nx.path_graph(4) p = nx.predecessor(G,0,3) assert_equal(p,[2]) p = nx.predecessor(G,0,3,cutoff=2) assert_equal(p,[]) p,s = nx.predecessor(G,0,3,return_seen=True) assert_equal(p,[2]) assert_equal(s,3) p,s = nx.predecessor(G,0,3,cutoff=2,return_seen=True) assert_equal(p,[]) assert_equal(s,-1) networkx-1.8.1/networkx/algorithms/shortest_paths/tests/test_dense.py0000664000175000017500000001037312177456333026256 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx as nx class TestFloyd: def setUp(self): pass def test_floyd_warshall_predecessor_and_distance(self): XG=nx.DiGraph() XG.add_weighted_edges_from([('s','u',10) ,('s','x',5) , ('u','v',1) ,('u','x',2) , ('v','y',1) ,('x','u',3) , ('x','v',5) ,('x','y',2) , ('y','s',7) ,('y','v',6)]) path, dist =nx.floyd_warshall_predecessor_and_distance(XG) assert_equal(dist['s']['v'],9) assert_equal(path['s']['v'],'u') assert_equal(dist, {'y': {'y': 0, 'x': 12, 's': 7, 'u': 15, 'v': 6}, 'x': {'y': 2, 'x': 0, 's': 9, 'u': 3, 'v': 4}, 's': {'y': 7, 'x': 5, 's': 0, 'u': 8, 'v': 9}, 'u': {'y': 2, 'x': 2, 's': 9, 'u': 0, 'v': 1}, 'v': {'y': 1, 'x': 13, 's': 8, 'u': 16, 'v': 0}}) GG=XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 GG['u']['x']['weight']=2 path, dist = nx.floyd_warshall_predecessor_and_distance(GG) assert_equal(dist['s']['v'],8) # skip this test, could be alternate path s-u-v # assert_equal(path['s']['v'],'y') G=nx.DiGraph() # no weights G.add_edges_from([('s','u'), ('s','x'), ('u','v'), ('u','x'), ('v','y'), ('x','u'), ('x','v'), ('x','y'), ('y','s'), ('y','v')]) path, dist = nx.floyd_warshall_predecessor_and_distance(G) assert_equal(dist['s']['v'],2) # skip this test, could be alternate path s-u-v # assert_equal(path['s']['v'],'x') # alternate interface dist = nx.floyd_warshall(G) assert_equal(dist['s']['v'],2) def test_cycle(self): path, dist = nx.floyd_warshall_predecessor_and_distance(nx.cycle_graph(7)) assert_equal(dist[0][3],3) assert_equal(path[0][3],2) assert_equal(dist[0][4],3) def 
test_weighted(self): XG3=nx.Graph() XG3.add_weighted_edges_from([ [0,1,2],[1,2,12],[2,3,1], [3,4,5],[4,5,1],[5,0,10] ]) path, dist = nx.floyd_warshall_predecessor_and_distance(XG3) assert_equal(dist[0][3],15) assert_equal(path[0][3],2) def test_weighted2(self): XG4=nx.Graph() XG4.add_weighted_edges_from([ [0,1,2],[1,2,2],[2,3,1], [3,4,1],[4,5,1],[5,6,1], [6,7,1],[7,0,1] ]) path, dist = nx.floyd_warshall_predecessor_and_distance(XG4) assert_equal(dist[0][2],4) assert_equal(path[0][2],1) def test_weight_parameter(self): XG4 = nx.Graph() XG4.add_edges_from([ (0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}), (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}), (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}), (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1}) ]) path, dist = nx.floyd_warshall_predecessor_and_distance(XG4, weight='heavy') assert_equal(dist[0][2], 4) assert_equal(path[0][2], 1) def test_zero_distance(self): XG=nx.DiGraph() XG.add_weighted_edges_from([('s','u',10) ,('s','x',5) , ('u','v',1) ,('u','x',2) , ('v','y',1) ,('x','u',3) , ('x','v',5) ,('x','y',2) , ('y','s',7) ,('y','v',6)]) path, dist =nx.floyd_warshall_predecessor_and_distance(XG) for u in XG: assert_equal(dist[u][u], 0) GG=XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 GG['u']['x']['weight']=2 path, dist = nx.floyd_warshall_predecessor_and_distance(GG) for u in GG: dist[u][u] = 0 networkx-1.8.1/networkx/algorithms/shortest_paths/tests/test_weighted.py0000664000175000017500000002466412177456333026770 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestWeightedPath: def setUp(self): from networkx import convert_node_labels_to_integers as cnlti self.grid=cnlti(nx.grid_2d_graph(4,4),first_label=1,ordering="sorted") self.cycle=nx.cycle_graph(7) self.directed_cycle=nx.cycle_graph(7,create_using=nx.DiGraph()) self.XG=nx.DiGraph() self.XG.add_weighted_edges_from([('s','u',10) ,('s','x',5) , ('u','v',1) ,('u','x',2) , ('v','y',1) ,('x','u',3) , ('x','v',5) ,('x','y',2) , ('y','s',7) ,('y','v',6)]) self.MXG=nx.MultiDiGraph(self.XG) self.MXG.add_edge('s','u',weight=15) self.XG2=nx.DiGraph() self.XG2.add_weighted_edges_from([[1,4,1],[4,5,1], [5,6,1],[6,3,1], [1,3,50],[1,2,100],[2,3,100]]) self.XG3=nx.Graph() self.XG3.add_weighted_edges_from([ [0,1,2],[1,2,12], [2,3,1],[3,4,5], [4,5,1],[5,0,10] ]) self.XG4=nx.Graph() self.XG4.add_weighted_edges_from([ [0,1,2],[1,2,2], [2,3,1],[3,4,1], [4,5,1],[5,6,1], [6,7,1],[7,0,1] ]) self.MXG4=nx.MultiGraph(self.XG4) self.MXG4.add_edge(0,1,weight=3) self.G=nx.DiGraph() # no weights self.G.add_edges_from([('s','u'), ('s','x'), ('u','v'), ('u','x'), ('v','y'), ('x','u'), ('x','v'), ('x','y'), ('y','s'), ('y','v')]) def test_dijkstra(self): (D,P)= nx.single_source_dijkstra(self.XG,'s') assert_equal(P['v'], ['s', 'x', 'u', 'v']) assert_equal(D['v'],9) assert_equal(nx.single_source_dijkstra_path(self.XG,'s')['v'], ['s', 'x', 'u', 'v']) assert_equal(nx.single_source_dijkstra_path_length(self.XG,'s')['v'],9) assert_equal(nx.single_source_dijkstra(self.XG,'s')[1]['v'], ['s', 'x', 'u', 'v']) assert_equal(nx.single_source_dijkstra_path(self.MXG,'s')['v'], ['s', 'x', 'u', 'v']) GG=self.XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 GG['u']['x']['weight']=2 (D,P)= nx.single_source_dijkstra(GG,'s') assert_equal(P['v'] , ['s', 'x', 'u', 'v']) assert_equal(D['v'],8) # uses lower weight of 2 on u<->x edge 
assert_equal(nx.dijkstra_path(GG,'s','v'), ['s', 'x', 'u', 'v']) assert_equal(nx.dijkstra_path_length(GG,'s','v'),8) assert_equal(nx.dijkstra_path(self.XG2,1,3), [1, 4, 5, 6, 3]) assert_equal(nx.dijkstra_path(self.XG3,0,3), [0, 1, 2, 3]) assert_equal(nx.dijkstra_path_length(self.XG3,0,3),15) assert_equal(nx.dijkstra_path(self.XG4,0,2), [0, 1, 2]) assert_equal(nx.dijkstra_path_length(self.XG4,0,2), 4) assert_equal(nx.dijkstra_path(self.MXG4,0,2), [0, 1, 2]) assert_equal(nx.single_source_dijkstra(self.G,'s','v')[1]['v'], ['s', 'u', 'v']) assert_equal(nx.single_source_dijkstra(self.G,'s')[1]['v'], ['s', 'u', 'v']) assert_equal(nx.dijkstra_path(self.G,'s','v'), ['s', 'u', 'v']) assert_equal(nx.dijkstra_path_length(self.G,'s','v'), 2) # NetworkXError: node s not reachable from moon assert_raises(nx.NetworkXNoPath,nx.dijkstra_path,self.G,'s','moon') assert_raises(nx.NetworkXNoPath,nx.dijkstra_path_length,self.G,'s','moon') assert_equal(nx.dijkstra_path(self.cycle,0,3),[0, 1, 2, 3]) assert_equal(nx.dijkstra_path(self.cycle,0,4), [0, 6, 5, 4]) assert_equal(nx.single_source_dijkstra(self.cycle,0,0),({0:0}, {0:[0]}) ) def test_bidirectional_dijkstra(self): assert_equal(nx.bidirectional_dijkstra(self.XG, 's', 'v'), (9, ['s', 'x', 'u', 'v'])) (dist,path) = nx.bidirectional_dijkstra(self.G,'s','v') assert_equal(dist,2) # skip this test, correct path could also be ['s','u','v'] # assert_equal(nx.bidirectional_dijkstra(self.G,'s','v'), # (2, ['s', 'x', 'v'])) assert_equal(nx.bidirectional_dijkstra(self.cycle,0,3), (3, [0, 1, 2, 3])) assert_equal(nx.bidirectional_dijkstra(self.cycle,0,4), (3, [0, 6, 5, 4])) assert_equal(nx.bidirectional_dijkstra(self.XG3,0,3), (15, [0, 1, 2, 3])) assert_equal(nx.bidirectional_dijkstra(self.XG4,0,2), (4, [0, 1, 2])) # need more tests here assert_equal(nx.dijkstra_path(self.XG,'s','v'), nx.single_source_dijkstra_path(self.XG,'s')['v']) @raises(nx.NetworkXNoPath) def test_bidirectional_dijkstra_no_path(self): G = nx.Graph() G.add_path([1,2,3]) G.add_path([4,5,6]) path = nx.bidirectional_dijkstra(G,1,6) def test_dijkstra_predecessor(self): G=nx.path_graph(4) assert_equal(nx.dijkstra_predecessor_and_distance(G,0), ({0: [], 1: [0], 2: [1], 3: [2]}, {0: 0, 1: 1, 2: 2, 3: 3})) G=nx.grid_2d_graph(2,2) pred,dist=nx.dijkstra_predecessor_and_distance(G,(0,0)) assert_equal(sorted(pred.items()), [((0, 0), []), ((0, 1), [(0, 0)]), ((1, 0), [(0, 0)]), ((1, 1), [(0, 1), (1, 0)])]) assert_equal(sorted(dist.items()), [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)]) XG=nx.DiGraph() XG.add_weighted_edges_from([('s','u',10) ,('s','x',5) , ('u','v',1) ,('u','x',2) , ('v','y',1) ,('x','u',3) , ('x','v',5) ,('x','y',2) , ('y','s',7) ,('y','v',6)]) (P,D)= nx.dijkstra_predecessor_and_distance(XG,'s') assert_equal(P['v'],['u']) assert_equal(D['v'],9) (P,D)= nx.dijkstra_predecessor_and_distance(XG,'s',cutoff=8) assert_false('v' in D) def test_single_source_dijkstra_path_length(self): pl = nx.single_source_dijkstra_path_length assert_equal(pl(self.MXG4,0)[2], 4) spl = pl(self.MXG4,0,cutoff=2) assert_false(2 in spl) def test_bidirectional_dijkstra_multigraph(self): G = nx.MultiGraph() G.add_edge('a', 'b', weight=10) G.add_edge('a', 'b', weight=100) dp= nx.bidirectional_dijkstra(G, 'a', 'b') assert_equal(dp,(10, ['a', 'b'])) def test_dijkstra_pred_distance_multigraph(self): G = nx.MultiGraph() G.add_edge('a', 'b', key='short',foo=5, weight=100) G.add_edge('a', 'b', key='long',bar=1, weight=110) p,d= nx.dijkstra_predecessor_and_distance(G, 'a') assert_equal(p,{'a': [], 'b': ['a']}) 
assert_equal(d,{'a': 0, 'b': 100}) def test_negative_edge_cycle(self): G = nx.cycle_graph(5, create_using = nx.DiGraph()) assert_equal(nx.negative_edge_cycle(G), False) G.add_edge(8, 9, weight = -7) G.add_edge(9, 8, weight = 3) assert_equal(nx.negative_edge_cycle(G), True) assert_raises(ValueError,nx.single_source_dijkstra_path_length,G,8) assert_raises(ValueError,nx.single_source_dijkstra,G,8) assert_raises(ValueError,nx.dijkstra_predecessor_and_distance,G,8) G.add_edge(9,10) assert_raises(ValueError,nx.bidirectional_dijkstra,G,8,10) def test_bellman_ford(self): # single node graph G = nx.DiGraph() G.add_node(0) assert_equal(nx.bellman_ford(G, 0), ({0: None}, {0: 0})) assert_raises(KeyError, nx.bellman_ford, G, 1) # negative weight cycle G = nx.cycle_graph(5, create_using = nx.DiGraph()) G.add_edge(1, 2, weight = -7) for i in range(5): assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i) G = nx.cycle_graph(5) # undirected Graph G.add_edge(1, 2, weight = -3) for i in range(5): assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, i) # no negative cycle but negative weight G = nx.cycle_graph(5, create_using = nx.DiGraph()) G.add_edge(1, 2, weight = -3) assert_equal(nx.bellman_ford(G, 0), ({0: None, 1: 0, 2: 1, 3: 2, 4: 3}, {0: 0, 1: 1, 2: -2, 3: -1, 4: 0})) # not connected G = nx.complete_graph(6) G.add_edge(10, 11) G.add_edge(10, 12) assert_equal(nx.bellman_ford(G, 0), ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) # not connected, with a component not containing the source that # contains a negative cost cycle. G = nx.complete_graph(6) G.add_edges_from([('A', 'B', {'load': 3}), ('B', 'C', {'load': -10}), ('C', 'A', {'load': 2})]) assert_equal(nx.bellman_ford(G, 0, weight = 'load'), ({0: None, 1: 0, 2: 0, 3: 0, 4: 0, 5: 0}, {0: 0, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1})) # multigraph P, D = nx.bellman_ford(self.MXG,'s') assert_equal(P['v'], 'u') assert_equal(D['v'], 9) P, D = nx.bellman_ford(self.MXG4, 0) assert_equal(P[2], 1) assert_equal(D[2], 4) # other tests (P,D)= nx.bellman_ford(self.XG,'s') assert_equal(P['v'], 'u') assert_equal(D['v'], 9) G=nx.path_graph(4) assert_equal(nx.bellman_ford(G,0), ({0: None, 1: 0, 2: 1, 3: 2}, {0: 0, 1: 1, 2: 2, 3: 3})) assert_equal(nx.bellman_ford(G, 3), ({0: 1, 1: 2, 2: 3, 3: None}, {0: 3, 1: 2, 2: 1, 3: 0})) G=nx.grid_2d_graph(2,2) pred,dist=nx.bellman_ford(G,(0,0)) assert_equal(sorted(pred.items()), [((0, 0), None), ((0, 1), (0, 0)), ((1, 0), (0, 0)), ((1, 1), (0, 1))]) assert_equal(sorted(dist.items()), [((0, 0), 0), ((0, 1), 1), ((1, 0), 1), ((1, 1), 2)]) networkx-1.8.1/networkx/algorithms/shortest_paths/tests/test_generic.py0000664000175000017500000001337712177456333026603 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestGenericPath: def setUp(self): from networkx import convert_node_labels_to_integers as cnlti self.grid=cnlti(nx.grid_2d_graph(4,4),first_label=1,ordering="sorted") self.cycle=nx.cycle_graph(7) self.directed_cycle=nx.cycle_graph(7,create_using=nx.DiGraph()) def test_shortest_path(self): assert_equal(nx.shortest_path(self.cycle,0,3),[0, 1, 2, 3]) assert_equal(nx.shortest_path(self.cycle,0,4),[0, 6, 5, 4]) assert_equal(nx.shortest_path(self.grid,1,12),[1, 2, 3, 4, 8, 12]) assert_equal(nx.shortest_path(self.directed_cycle,0,3),[0, 1, 2, 3]) # now with weights assert_equal(nx.shortest_path(self.cycle,0,3,weight='weight'),[0, 1, 2, 3]) assert_equal(nx.shortest_path(self.cycle,0,4,weight='weight'),[0, 6, 5, 4]) 
assert_equal(nx.shortest_path(self.grid,1,12,weight='weight'),[1, 2, 3, 4, 8, 12]) assert_equal(nx.shortest_path(self.directed_cycle,0,3,weight='weight'), [0, 1, 2, 3]) def test_shortest_path_target(self): sp = nx.shortest_path(nx.path_graph(3), target=1) assert_equal(sp, {0: [0, 1], 1: [1], 2: [2, 1]}) def test_shortest_path_length(self): assert_equal(nx.shortest_path_length(self.cycle,0,3),3) assert_equal(nx.shortest_path_length(self.grid,1,12),5) assert_equal(nx.shortest_path_length(self.directed_cycle,0,4),4) # now with weights assert_equal(nx.shortest_path_length(self.cycle,0,3,weight='weight'),3) assert_equal(nx.shortest_path_length(self.grid,1,12,weight='weight'),5) assert_equal(nx.shortest_path_length(self.directed_cycle,0,4,weight='weight'),4) def test_shortest_path_length_target(self): sp = nx.shortest_path_length(nx.path_graph(3), target=1) assert_equal(sp[0], 1) assert_equal(sp[1], 0) assert_equal(sp[2], 1) def test_single_source_shortest_path(self): p=nx.shortest_path(self.cycle,0) assert_equal(p[3],[0,1,2,3]) assert_equal(p,nx.single_source_shortest_path(self.cycle,0)) p=nx.shortest_path(self.grid,1) assert_equal(p[12],[1, 2, 3, 4, 8, 12]) # now with weights p=nx.shortest_path(self.cycle,0,weight='weight') assert_equal(p[3],[0,1,2,3]) assert_equal(p,nx.single_source_dijkstra_path(self.cycle,0)) p=nx.shortest_path(self.grid,1,weight='weight') assert_equal(p[12],[1, 2, 3, 4, 8, 12]) def test_single_source_shortest_path_length(self): l=nx.shortest_path_length(self.cycle,0) assert_equal(l,{0:0,1:1,2:2,3:3,4:3,5:2,6:1}) assert_equal(l,nx.single_source_shortest_path_length(self.cycle,0)) l=nx.shortest_path_length(self.grid,1) assert_equal(l[16],6) # now with weights l=nx.shortest_path_length(self.cycle,0,weight='weight') assert_equal(l,{0:0,1:1,2:2,3:3,4:3,5:2,6:1}) assert_equal(l,nx.single_source_dijkstra_path_length(self.cycle,0)) l=nx.shortest_path_length(self.grid,1,weight='weight') assert_equal(l[16],6) def test_all_pairs_shortest_path(self): p=nx.shortest_path(self.cycle) assert_equal(p[0][3],[0,1,2,3]) assert_equal(p,nx.all_pairs_shortest_path(self.cycle)) p=nx.shortest_path(self.grid) assert_equal(p[1][12],[1, 2, 3, 4, 8, 12]) # now with weights p=nx.shortest_path(self.cycle,weight='weight') assert_equal(p[0][3],[0,1,2,3]) assert_equal(p,nx.all_pairs_dijkstra_path(self.cycle)) p=nx.shortest_path(self.grid,weight='weight') assert_equal(p[1][12],[1, 2, 3, 4, 8, 12]) def test_all_pairs_shortest_path_length(self): l=nx.shortest_path_length(self.cycle) assert_equal(l[0],{0:0,1:1,2:2,3:3,4:3,5:2,6:1}) assert_equal(l,nx.all_pairs_shortest_path_length(self.cycle)) l=nx.shortest_path_length(self.grid) assert_equal(l[1][16],6) # now with weights l=nx.shortest_path_length(self.cycle,weight='weight') assert_equal(l[0],{0:0,1:1,2:2,3:3,4:3,5:2,6:1}) assert_equal(l,nx.all_pairs_dijkstra_path_length(self.cycle)) l=nx.shortest_path_length(self.grid,weight='weight') assert_equal(l[1][16],6) def test_average_shortest_path(self): l=nx.average_shortest_path_length(self.cycle) assert_almost_equal(l,2) l=nx.average_shortest_path_length(nx.path_graph(5)) assert_almost_equal(l,2) def test_weighted_average_shortest_path(self): G=nx.Graph() G.add_cycle(range(7),weight=2) l=nx.average_shortest_path_length(G,weight='weight') assert_almost_equal(l,4) G=nx.Graph() G.add_path(range(5),weight=2) l=nx.average_shortest_path_length(G,weight='weight') assert_almost_equal(l,4) def test_average_shortest_disconnected(self): g = nx.Graph() g.add_nodes_from(range(3)) g.add_edge(0, 1) 
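        # node 2 is isolated, so the graph is disconnected and the average
        # shortest path length is undefined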
assert_raises(nx.NetworkXError,nx.average_shortest_path_length,g) g = g.to_directed() assert_raises(nx.NetworkXError,nx.average_shortest_path_length,g) def test_has_path(self): G = nx.Graph() G.add_path(range(3)) G.add_path(range(3,5)) assert_true(nx.has_path(G,0,2)) assert_false(nx.has_path(G,0,4)) def test_all_shortest_paths(self): G = nx.Graph() G.add_path([0,1,2,3]) G.add_path([0,10,20,3]) assert_equal([[0,1,2,3],[0,10,20,3]], sorted(nx.all_shortest_paths(G,0,3))) @raises(nx.NetworkXNoPath) def test_all_shortest_paths_raise(self): G = nx.Graph() G.add_path([0,1,2,3]) G.add_node(4) paths = list(nx.all_shortest_paths(G,0,4)) networkx-1.8.1/networkx/algorithms/shortest_paths/tests/test_astar.py0000664000175000017500000001147712177456333026300 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from random import random, choice class TestAStar: def setUp(self): self.XG=nx.DiGraph() self.XG.add_edges_from([('s','u',{'weight':10}), ('s','x',{'weight':5}), ('u','v',{'weight':1}), ('u','x',{'weight':2}), ('v','y',{'weight':1}), ('x','u',{'weight':3}), ('x','v',{'weight':5}), ('x','y',{'weight':2}), ('y','s',{'weight':7}), ('y','v',{'weight':6})]) def test_random_graph(self): def dist(a, b): (x1, y1) = a (x2, y2) = b return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5 G = nx.Graph() points = [(random(), random()) for _ in range(100)] # Build a path from points[0] to points[-1] to be sure it exists for p1, p2 in zip(points[:-1], points[1:]): G.add_edge(p1, p2, weight=dist(p1, p2)) # Add other random edges for _ in range(100): p1, p2 = choice(points), choice(points) G.add_edge(p1, p2, weight=dist(p1, p2)) path = nx.astar_path(G, points[0], points[-1], dist) assert path == nx.dijkstra_path(G, points[0], points[-1]) def test_astar_directed(self): assert nx.astar_path(self.XG,'s','v')==['s', 'x', 'u', 'v'] assert nx.astar_path_length(self.XG,'s','v')==9 def test_astar_multigraph(self): G=nx.MultiDiGraph(self.XG) assert_raises((TypeError,nx.NetworkXError), nx.astar_path, [G,'s','v']) assert_raises((TypeError,nx.NetworkXError), nx.astar_path_length, [G,'s','v']) def test_astar_undirected(self): GG=self.XG.to_undirected() # make sure we get lower weight # to_undirected might choose either edge with weight 2 or weight 3 GG['u']['x']['weight']=2 GG['y']['v']['weight'] = 2 assert_equal(nx.astar_path(GG,'s','v'),['s', 'x', 'u', 'v']) assert_equal(nx.astar_path_length(GG,'s','v'),8) def test_astar_directed2(self): XG2=nx.DiGraph() XG2.add_edges_from([[1,4,{'weight':1}], [4,5,{'weight':1}], [5,6,{'weight':1}], [6,3,{'weight':1}], [1,3,{'weight':50}], [1,2,{'weight':100}], [2,3,{'weight':100}]]) assert nx.astar_path(XG2,1,3)==[1, 4, 5, 6, 3] def test_astar_undirected2(self): XG3=nx.Graph() XG3.add_edges_from([ [0,1,{'weight':2}], [1,2,{'weight':12}], [2,3,{'weight':1}], [3,4,{'weight':5}], [4,5,{'weight':1}], [5,0,{'weight':10}] ]) assert nx.astar_path(XG3,0,3)==[0, 1, 2, 3] assert nx.astar_path_length(XG3,0,3)==15 def test_astar_undirected3(self): XG4=nx.Graph() XG4.add_edges_from([ [0,1,{'weight':2}], [1,2,{'weight':2}], [2,3,{'weight':1}], [3,4,{'weight':1}], [4,5,{'weight':1}], [5,6,{'weight':1}], [6,7,{'weight':1}], [7,0,{'weight':1}] ]) assert nx.astar_path(XG4,0,2)==[0, 1, 2] assert nx.astar_path_length(XG4,0,2)==4 # >>> MXG4=NX.MultiGraph(XG4) # >>> MXG4.add_edge(0,1,3) # >>> NX.dijkstra_path(MXG4,0,2) # [0, 1, 2] def test_astar_w1(self): G=nx.DiGraph() G.add_edges_from([('s','u'), ('s','x'), ('u','v'), ('u','x'), ('v','y'), ('x','u'), ('x','w'), ('w', 'v'), 
('x','y'), ('y','s'), ('y','v')]) assert nx.astar_path(G,'s','v')==['s', 'u', 'v'] assert nx.astar_path_length(G,'s','v')== 2 @raises(nx.NetworkXNoPath) def test_astar_nopath(self): p = nx.astar_path(self.XG,'s','moon') def test_cycle(self): C=nx.cycle_graph(7) assert nx.astar_path(C,0,3)==[0, 1, 2, 3] assert nx.dijkstra_path(C,0,4)==[0, 6, 5, 4] def test_orderable(self): class UnorderableClass: pass node_1 = UnorderableClass() node_2 = UnorderableClass() node_3 = UnorderableClass() node_4 = UnorderableClass() G = nx.Graph() G.add_edge(node_1, node_2) G.add_edge(node_1, node_3) G.add_edge(node_2, node_4) G.add_edge(node_3, node_4) path=nx.algorithms.shortest_paths.astar.astar_path(G, node_1, node_4) networkx-1.8.1/networkx/algorithms/shortest_paths/tests/test_dense_numpy.py0000664000175000017500000000370212177456333027504 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx as nx class TestFloydNumpy(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global numpy global assert_equal global assert_almost_equal try: import numpy from numpy.testing import assert_equal,assert_almost_equal except ImportError: raise SkipTest('NumPy not available.') def test_cycle_numpy(self): dist = nx.floyd_warshall_numpy(nx.cycle_graph(7)) assert_equal(dist[0,3],3) assert_equal(dist[0,4],3) def test_weighted_numpy(self): XG3=nx.Graph() XG3.add_weighted_edges_from([ [0,1,2],[1,2,12],[2,3,1], [3,4,5],[4,5,1],[5,0,10] ]) dist = nx.floyd_warshall_numpy(XG3) assert_equal(dist[0,3],15) def test_weighted_numpy(self): XG4=nx.Graph() XG4.add_weighted_edges_from([ [0,1,2],[1,2,2],[2,3,1], [3,4,1],[4,5,1],[5,6,1], [6,7,1],[7,0,1] ]) dist = nx.floyd_warshall_numpy(XG4) assert_equal(dist[0,2],4) def test_weight_parameter_numpy(self): XG4 = nx.Graph() XG4.add_edges_from([ (0, 1, {'heavy': 2}), (1, 2, {'heavy': 2}), (2, 3, {'heavy': 1}), (3, 4, {'heavy': 1}), (4, 5, {'heavy': 1}), (5, 6, {'heavy': 1}), (6, 7, {'heavy': 1}), (7, 0, {'heavy': 1}) ]) dist = nx.floyd_warshall_numpy(XG4, weight='heavy') assert_equal(dist[0, 2], 4) def test_directed_cycle_numpy(self): G = nx.DiGraph() G.add_cycle([0,1,2,3]) pred,dist = nx.floyd_warshall_predecessor_and_distance(G) D = nx.utils.dict_to_numpy_array(dist) assert_equal(nx.floyd_warshall_numpy(G),D) networkx-1.8.1/networkx/algorithms/shortest_paths/weighted.py0000664000175000017500000005517112177456333024564 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Shortest path algorithms for weighed graphs. """ __author__ = """\n""".join(['Aric Hagberg ', 'Loïc Séguin-C. ', 'Dan Schult ']) # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__ = ['dijkstra_path', 'dijkstra_path_length', 'bidirectional_dijkstra', 'single_source_dijkstra', 'single_source_dijkstra_path', 'single_source_dijkstra_path_length', 'all_pairs_dijkstra_path', 'all_pairs_dijkstra_path_length', 'dijkstra_predecessor_and_distance', 'bellman_ford','negative_edge_cycle'] import heapq import networkx as nx from networkx.utils import generate_unique_node def dijkstra_path(G, source, target, weight='weight'): """Returns the shortest path from source to target in a weighted graph G. Parameters ---------- G : NetworkX graph source : node Starting node target : node Ending node weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- path : list List of nodes in a shortest path. 
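       The returned list starts at the source node and ends at the target
       node.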
Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.dijkstra_path(G,0,4)) [0, 1, 2, 3, 4] Notes ------ Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- bidirectional_dijkstra() """ (length,path)=single_source_dijkstra(G, source, target=target, weight=weight) try: return path[target] except KeyError: raise nx.NetworkXNoPath("node %s not reachable from %s"%(source,target)) def dijkstra_path_length(G, source, target, weight='weight'): """Returns the shortest path length from source to target in a weighted graph. Parameters ---------- G : NetworkX graph source : node label starting node for path target : node label ending node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- length : number Shortest path length. Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> print(nx.dijkstra_path_length(G,0,4)) 4 Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- bidirectional_dijkstra() """ length=single_source_dijkstra_path_length(G, source, weight=weight) try: return length[target] except KeyError: raise nx.NetworkXNoPath("node %s not reachable from %s"%(source,target)) def single_source_dijkstra_path(G,source, cutoff=None, weight='weight'): """Compute shortest path between source and all other reachable nodes for a weighted graph. Parameters ---------- G : NetworkX graph source : node Starting node for path. weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- paths : dictionary Dictionary of shortest path lengths keyed by target. Examples -------- >>> G=nx.path_graph(5) >>> path=nx.single_source_dijkstra_path(G,0) >>> path[4] [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- single_source_dijkstra() """ (length,path)=single_source_dijkstra(G,source, cutoff = cutoff, weight = weight) return path def single_source_dijkstra_path_length(G, source, cutoff= None, weight= 'weight'): """Compute the shortest path length between source and all other reachable nodes for a weighted graph. Parameters ---------- G : NetworkX graph source : node label Starting node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight. cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- length : dictionary Dictionary of shortest lengths keyed by target. Examples -------- >>> G=nx.path_graph(5) >>> length=nx.single_source_dijkstra_path_length(G,0) >>> length[4] 4 >>> print(length) {0: 0, 1: 1, 2: 2, 3: 3, 4: 4} Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- single_source_dijkstra() """ dist = {} # dictionary of final distances seen = {source:0} fringe=[] # use heapq with (distance,label) tuples heapq.heappush(fringe,(0,source)) while fringe: (d,v)=heapq.heappop(fringe) if v in dist: continue # already searched this node. 
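        # first time v is popped from the heap: d is its final shortest-path
        # distance from the source, so record it before relaxing its neighbors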
dist[v] = d #for ignore,w,edgedata in G.edges_iter(v,data=True): #is about 30% slower than the following if G.is_multigraph(): edata=[] for w,keydata in G[v].items(): minweight=min((dd.get(weight,1) for k,dd in keydata.items())) edata.append((w,{weight:minweight})) else: edata=iter(G[v].items()) for w,edgedata in edata: vw_dist = dist[v] + edgedata.get(weight,1) if cutoff is not None: if vw_dist>cutoff: continue if w in dist: if vw_dist < dist[w]: raise ValueError('Contradictory paths found:', 'negative weights?') elif w not in seen or vw_dist < seen[w]: seen[w] = vw_dist heapq.heappush(fringe,(vw_dist,w)) return dist def single_source_dijkstra(G,source,target=None,cutoff=None,weight='weight'): """Compute shortest paths and lengths in a weighted graph G. Uses Dijkstra's algorithm for shortest paths. Parameters ---------- G : NetworkX graph source : node label Starting node for path target : node label, optional Ending node for path cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- distance,path : dictionaries Returns a tuple of two dictionaries keyed by node. The first dictionary stores distance from the source. The second stores the path from the source to that node. Examples -------- >>> G=nx.path_graph(5) >>> length,path=nx.single_source_dijkstra(G,0) >>> print(length[4]) 4 >>> print(length) {0: 0, 1: 1, 2: 2, 3: 3, 4: 4} >>> path[4] [0, 1, 2, 3, 4] Notes --------- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. Based on the Python cookbook recipe (119466) at http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466 This algorithm is not guaranteed to work if edge weights are negative or are floating point numbers (overflows and roundoff errors can cause problems). See Also -------- single_source_dijkstra_path() single_source_dijkstra_path_length() """ if source==target: return ({source:0}, {source:[source]}) dist = {} # dictionary of final distances paths = {source:[source]} # dictionary of paths seen = {source:0} fringe=[] # use heapq with (distance,label) tuples heapq.heappush(fringe,(0,source)) while fringe: (d,v)=heapq.heappop(fringe) if v in dist: continue # already searched this node. dist[v] = d if v == target: break #for ignore,w,edgedata in G.edges_iter(v,data=True): #is about 30% slower than the following if G.is_multigraph(): edata=[] for w,keydata in G[v].items(): minweight=min((dd.get(weight,1) for k,dd in keydata.items())) edata.append((w,{weight:minweight})) else: edata=iter(G[v].items()) for w,edgedata in edata: vw_dist = dist[v] + edgedata.get(weight,1) if cutoff is not None: if vw_dist>cutoff: continue if w in dist: if vw_dist < dist[w]: raise ValueError('Contradictory paths found:', 'negative weights?') elif w not in seen or vw_dist < seen[w]: seen[w] = vw_dist heapq.heappush(fringe,(vw_dist,w)) paths[w] = paths[v]+[w] return (dist,paths) def dijkstra_predecessor_and_distance(G,source, cutoff=None, weight='weight'): """Compute shortest path length and predecessors on shortest paths in weighted graphs. Parameters ---------- G : NetworkX graph source : node label Starting node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- pred,distance : dictionaries Returns two dictionaries representing a list of predecessors of a node and the distance to each node. 
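    Examples
    --------
    A small path graph illustrates the output format; the values below mirror
    those exercised in the accompanying test suite.

    >>> G = nx.path_graph(4)
    >>> pred, dist = nx.dijkstra_predecessor_and_distance(G, 0)
    >>> sorted(pred.items())
    [(0, []), (1, [0]), (2, [1]), (3, [2])]
    >>> sorted(dist.items())
    [(0, 0), (1, 1), (2, 2), (3, 3)]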
Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. The list of predecessors contains more than one element only when there are more than one shortest paths to the key node. """ push=heapq.heappush pop=heapq.heappop dist = {} # dictionary of final distances pred = {source:[]} # dictionary of predecessors seen = {source:0} fringe=[] # use heapq with (distance,label) tuples push(fringe,(0,source)) while fringe: (d,v)=pop(fringe) if v in dist: continue # already searched this node. dist[v] = d if G.is_multigraph(): edata=[] for w,keydata in G[v].items(): minweight=min((dd.get(weight,1) for k,dd in keydata.items())) edata.append((w,{weight:minweight})) else: edata=iter(G[v].items()) for w,edgedata in edata: vw_dist = dist[v] + edgedata.get(weight,1) if cutoff is not None: if vw_dist>cutoff: continue if w in dist: if vw_dist < dist[w]: raise ValueError('Contradictory paths found:', 'negative weights?') elif w not in seen or vw_dist < seen[w]: seen[w] = vw_dist push(fringe,(vw_dist,w)) pred[w] = [v] elif vw_dist==seen[w]: pred[w].append(v) return (pred,dist) def all_pairs_dijkstra_path_length(G, cutoff=None, weight='weight'): """ Compute shortest path lengths between all nodes in a weighted graph. Parameters ---------- G : NetworkX graph weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- distance : dictionary Dictionary, keyed by source and target, of shortest path lengths. Examples -------- >>> G=nx.path_graph(5) >>> length=nx.all_pairs_dijkstra_path_length(G) >>> print(length[1][4]) 3 >>> length[1] {0: 1, 1: 0, 2: 1, 3: 2, 4: 3} Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. The dictionary returned only has keys for reachable node pairs. """ paths={} for n in G: paths[n]=single_source_dijkstra_path_length(G,n, cutoff=cutoff, weight=weight) return paths def all_pairs_dijkstra_path(G, cutoff=None, weight='weight'): """ Compute shortest paths between all nodes in a weighted graph. Parameters ---------- G : NetworkX graph weight: string, optional (default='weight') Edge data key corresponding to the edge weight cutoff : integer or float, optional Depth to stop the search. Only paths of length <= cutoff are returned. Returns ------- distance : dictionary Dictionary, keyed by source and target, of shortest paths. Examples -------- >>> G=nx.path_graph(5) >>> path=nx.all_pairs_dijkstra_path(G) >>> print(path[0][4]) [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. See Also -------- floyd_warshall() """ paths={} for n in G: paths[n]=single_source_dijkstra_path(G, n, cutoff=cutoff, weight=weight) return paths def bellman_ford(G, source, weight = 'weight'): """Compute shortest path lengths and predecessors on shortest paths in weighted graphs. The algorithm has a running time of O(mn) where n is the number of nodes and m is the number of edges. It is slower than Dijkstra but can handle negative edge weights. Parameters ---------- G : NetworkX graph The algorithm works for all types of graphs, including directed graphs and multigraphs. 
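       For multigraphs, the smallest weight among all parallel edges between
       a pair of nodes is used.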
source: node label Starting node for path weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- pred, dist : dictionaries Returns two dictionaries keyed by node to predecessor in the path and to the distance from the source respectively. Raises ------ NetworkXUnbounded If the (di)graph contains a negative cost (di)cycle, the algorithm raises an exception to indicate the presence of the negative cost (di)cycle. Note: any negative weight edge in an undirected graph is a negative cost cycle. Examples -------- >>> import networkx as nx >>> G = nx.path_graph(5, create_using = nx.DiGraph()) >>> pred, dist = nx.bellman_ford(G, 0) >>> pred {0: None, 1: 0, 2: 1, 3: 2, 4: 3} >>> dist {0: 0, 1: 1, 2: 2, 3: 3, 4: 4} >>> from nose.tools import assert_raises >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) >>> G[1][2]['weight'] = -7 >>> assert_raises(nx.NetworkXUnbounded, nx.bellman_ford, G, 0) Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. The dictionaries returned only have keys for nodes reachable from the source. In the case where the (di)graph is not connected, if a component not containing the source contains a negative cost (di)cycle, it will not be detected. """ if source not in G: raise KeyError("Node %s is not found in the graph"%source) numb_nodes = len(G) dist = {source: 0} pred = {source: None} if numb_nodes == 1: return pred, dist if G.is_multigraph(): def get_weight(edge_dict): return min([eattr.get(weight,1) for eattr in edge_dict.values()]) else: def get_weight(edge_dict): return edge_dict.get(weight,1) for i in range(numb_nodes): no_changes=True # Only need edges from nodes in dist b/c all others have dist==inf for u, dist_u in list(dist.items()): # get all edges from nodes in dist for v, edict in G[u].items(): # double loop handles undirected too dist_v = dist_u + get_weight(edict) if v not in dist or dist[v] > dist_v: dist[v] = dist_v pred[v] = u no_changes = False if no_changes: break else: raise nx.NetworkXUnbounded("Negative cost cycle detected.") return pred, dist def negative_edge_cycle(G, weight = 'weight'): """Return True if there exists a negative edge cycle anywhere in G. Parameters ---------- G : NetworkX graph weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- negative_cycle : bool True if a negative edge cycle exists, otherwise False. Examples -------- >>> import networkx as nx >>> G = nx.cycle_graph(5, create_using = nx.DiGraph()) >>> print(nx.negative_edge_cycle(G)) False >>> G[1][2]['weight'] = -7 >>> print(nx.negative_edge_cycle(G)) True Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. This algorithm uses bellman_ford() but finds negative cycles on any component by first adding a new node connected to every node, and starting bellman_ford on that node. It then removes that extra node. """ newnode = generate_unique_node() G.add_edges_from([ (newnode,n) for n in G]) try: bellman_ford(G, newnode, weight) except nx.NetworkXUnbounded: G.remove_node(newnode) return True G.remove_node(newnode) return False def bidirectional_dijkstra(G, source, target, weight = 'weight'): """Dijkstra's algorithm for shortest paths using bidirectional search. Parameters ---------- G : NetworkX graph source : node Starting node. target : node Ending node. 
weight: string, optional (default='weight') Edge data key corresponding to the edge weight Returns ------- length : number Shortest path length. Returns a tuple of two dictionaries keyed by node. The first dictionary stores distance from the source. The second stores the path from the source to that node. Raises ------ NetworkXNoPath If no path exists between source and target. Examples -------- >>> G=nx.path_graph(5) >>> length,path=nx.bidirectional_dijkstra(G,0,4) >>> print(length) 4 >>> print(path) [0, 1, 2, 3, 4] Notes ----- Edge weight attributes must be numerical. Distances are calculated as sums of weighted edges traversed. In practice bidirectional Dijkstra is much more than twice as fast as ordinary Dijkstra. Ordinary Dijkstra expands nodes in a sphere-like manner from the source. The radius of this sphere will eventually be the length of the shortest path. Bidirectional Dijkstra will expand nodes from both the source and the target, making two spheres of half this radius. Volume of the first sphere is pi*r*r while the others are 2*pi*r/2*r/2, making up half the volume. This algorithm is not guaranteed to work if edge weights are negative or are floating point numbers (overflows and roundoff errors can cause problems). See Also -------- shortest_path shortest_path_length """ if source == target: return (0, [source]) #Init: Forward Backward dists = [{}, {}]# dictionary of final distances paths = [{source:[source]}, {target:[target]}] # dictionary of paths fringe = [[], []] #heap of (distance, node) tuples for extracting next node to expand seen = [{source:0}, {target:0} ]#dictionary of distances to nodes seen #initialize fringe heap heapq.heappush(fringe[0], (0, source)) heapq.heappush(fringe[1], (0, target)) #neighs for extracting correct neighbor information if G.is_directed(): neighs = [G.successors_iter, G.predecessors_iter] else: neighs = [G.neighbors_iter, G.neighbors_iter] #variables to hold shortest discovered path #finaldist = 1e30000 finalpath = [] dir = 1 while fringe[0] and fringe[1]: # choose direction # dir == 0 is forward direction and dir == 1 is back dir = 1-dir # extract closest to expand (dist, v )= heapq.heappop(fringe[dir]) if v in dists[dir]: # Shortest path to v has already been found continue # update distance dists[dir][v] = dist #equal to seen[dir][v] if v in dists[1-dir]: # if we have scanned v in both directions we are done # we have now discovered the shortest path return (finaldist,finalpath) for w in neighs[dir](v): if(dir==0): #forward if G.is_multigraph(): minweight=min((dd.get(weight,1) for k,dd in G[v][w].items())) else: minweight=G[v][w].get(weight,1) vwLength = dists[dir][v] + minweight #G[v][w].get(weight,1) else: #back, must remember to change v,w->w,v if G.is_multigraph(): minweight=min((dd.get(weight,1) for k,dd in G[w][v].items())) else: minweight=G[w][v].get(weight,1) vwLength = dists[dir][v] + minweight #G[w][v].get(weight,1) if w in dists[dir]: if vwLength < dists[dir][w]: raise ValueError("Contradictory paths found: negative weights?") elif w not in seen[dir] or vwLength < seen[dir][w]: # relaxing seen[dir][w] = vwLength heapq.heappush(fringe[dir], (vwLength,w)) paths[dir][w] = paths[dir][v]+[w] if w in seen[0] and w in seen[1]: #see if this path is better than than the already #discovered shortest path totaldist = seen[0][w] + seen[1][w] if finalpath == [] or finaldist > totaldist: finaldist = totaldist revpath = paths[1][w][:] revpath.reverse() finalpath = paths[0][w] + revpath[1:] raise nx.NetworkXNoPath("No path between %s and %s." 
% (source, target)) networkx-1.8.1/networkx/algorithms/mst.py0000664000175000017500000001633612177456333020515 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Computes minimum spanning tree of a weighted graph. """ # Copyright (C) 2009-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # Loïc Séguin-C. # All rights reserved. # BSD license. __all__ = ['kruskal_mst', 'minimum_spanning_edges', 'minimum_spanning_tree', 'prim_mst_edges', 'prim_mst'] import networkx as nx from heapq import heappop, heappush def minimum_spanning_edges(G,weight='weight',data=True): """Generate edges in a minimum spanning forest of an undirected weighted graph. A minimum spanning tree is a subgraph of the graph (a tree) with the minimum sum of edge weights. A spanning forest is a union of the spanning trees for each connected component of the graph. Parameters ---------- G : NetworkX Graph weight : string Edge data key to use for weight (default 'weight'). data : bool, optional If True yield the edge data along with the edge. Returns ------- edges : iterator A generator that produces edges in the minimum spanning tree. The edges are three-tuples (u,v,w) where w is the weight. Examples -------- >>> G=nx.cycle_graph(4) >>> G.add_edge(0,3,weight=2) # assign weight 2 to edge 0-3 >>> mst=nx.minimum_spanning_edges(G,data=False) # a generator of MST edges >>> edgelist=list(mst) # make a list of the edges >>> print(sorted(edgelist)) [(0, 1), (1, 2), (2, 3)] Notes ----- Uses Kruskal's algorithm. If the graph edges do not have a weight attribute a default weight of 1 will be used. Modified code from David Eppstein, April 2006 http://www.ics.uci.edu/~eppstein/PADS/ """ # Modified code from David Eppstein, April 2006 # http://www.ics.uci.edu/~eppstein/PADS/ # Kruskal's algorithm: sort edges by weight, and add them one at a time. # We use Kruskal's algorithm, first because it is very simple to # implement once UnionFind exists, and second, because the only slow # part (the sort) is sped up by being built in to Python. from networkx.utils import UnionFind if G.is_directed(): raise nx.NetworkXError( "Mimimum spanning tree not defined for directed graphs.") subtrees = UnionFind() edges = sorted(G.edges(data=True),key=lambda t: t[2].get(weight,1)) for u,v,d in edges: if subtrees[u] != subtrees[v]: if data: yield (u,v,d) else: yield (u,v) subtrees.union(u,v) def minimum_spanning_tree(G,weight='weight'): """Return a minimum spanning tree or forest of an undirected weighted graph. A minimum spanning tree is a subgraph of the graph (a tree) with the minimum sum of edge weights. If the graph is not connected a spanning forest is constructed. A spanning forest is a union of the spanning trees for each connected component of the graph. Parameters ---------- G : NetworkX Graph weight : string Edge data key to use for weight (default 'weight'). Returns ------- G : NetworkX Graph A minimum spanning tree or forest. Examples -------- >>> G=nx.cycle_graph(4) >>> G.add_edge(0,3,weight=2) # assign weight 2 to edge 0-3 >>> T=nx.minimum_spanning_tree(G) >>> print(sorted(T.edges(data=True))) [(0, 1, {}), (1, 2, {}), (2, 3, {})] Notes ----- Uses Kruskal's algorithm. If the graph edges do not have a weight attribute a default weight of 1 will be used. 
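    Isolated nodes of G are included in the returned graph, and the node and
    graph attributes of G are copied to it as shallow copies.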
""" T=nx.Graph(nx.minimum_spanning_edges(G,weight=weight,data=True)) # Add isolated nodes if len(T)!=len(G): T.add_nodes_from([n for n,d in G.degree().items() if d==0]) # Add node and graph attributes as shallow copy for n in T: T.node[n]=G.node[n].copy() T.graph=G.graph.copy() return T kruskal_mst=minimum_spanning_tree def prim_mst_edges(G, weight = 'weight', data = True): """Generate edges in a minimum spanning forest of an undirected weighted graph. A minimum spanning tree is a subgraph of the graph (a tree) with the minimum sum of edge weights. A spanning forest is a union of the spanning trees for each connected component of the graph. Parameters ---------- G : NetworkX Graph weight : string Edge data key to use for weight (default 'weight'). data : bool, optional If True yield the edge data along with the edge. Returns ------- edges : iterator A generator that produces edges in the minimum spanning tree. The edges are three-tuples (u,v,w) where w is the weight. Examples -------- >>> G=nx.cycle_graph(4) >>> G.add_edge(0,3,weight=2) # assign weight 2 to edge 0-3 >>> mst=nx.prim_mst_edges(G,data=False) # a generator of MST edges >>> edgelist=list(mst) # make a list of the edges >>> print(sorted(edgelist)) [(0, 1), (1, 2), (2, 3)] Notes ----- Uses Prim's algorithm. If the graph edges do not have a weight attribute a default weight of 1 will be used. """ if G.is_directed(): raise nx.NetworkXError( "Mimimum spanning tree not defined for directed graphs.") nodes = G.nodes() while nodes: u = nodes.pop(0) frontier = [] visited = [u] for u, v in G.edges(u): heappush(frontier, (G[u][v].get(weight, 1), u, v)) while frontier: W, u, v = heappop(frontier) if v in visited: continue visited.append(v) nodes.remove(v) for v, w in G.edges(v): if not w in visited: heappush(frontier, (G[v][w].get(weight, 1), v, w)) if data: yield u, v, G[u][v] else: yield u, v def prim_mst(G, weight = 'weight'): """Return a minimum spanning tree or forest of an undirected weighted graph. A minimum spanning tree is a subgraph of the graph (a tree) with the minimum sum of edge weights. If the graph is not connected a spanning forest is constructed. A spanning forest is a union of the spanning trees for each connected component of the graph. Parameters ---------- G : NetworkX Graph weight : string Edge data key to use for weight (default 'weight'). Returns ------- G : NetworkX Graph A minimum spanning tree or forest. Examples -------- >>> G=nx.cycle_graph(4) >>> G.add_edge(0,3,weight=2) # assign weight 2 to edge 0-3 >>> T=nx.prim_mst(G) >>> print(sorted(T.edges(data=True))) [(0, 1, {}), (1, 2, {}), (2, 3, {})] Notes ----- Uses Prim's algorithm. If the graph edges do not have a weight attribute a default weight of 1 will be used. """ T=nx.Graph(nx.prim_mst_edges(G,weight=weight,data=True)) # Add isolated nodes if len(T)!=len(G): T.add_nodes_from([n for n,d in G.degree().items() if d==0]) # Add node and graph attributes as shallow copy for n in T: T.node[n]=G.node[n].copy() T.graph=G.graph.copy() return T networkx-1.8.1/networkx/algorithms/chordal/0000775000175000017500000000000012177457361020745 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/chordal/chordal_alg.py0000664000175000017500000002502012177456333023553 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Algorithms for chordal graphs. A graph is chordal if every cycle of length at least 4 has a chord (an edge joining two nodes not adjacent in the cycle). 
http://en.wikipedia.org/wiki/Chordal_graph """ import networkx as nx import random import sys __authors__ = "\n".join(['Jesus Cerquides ']) # Copyright (C) 2010 by # Jesus Cerquides # All rights reserved. # BSD license. __all__ = ['is_chordal', 'find_induced_nodes', 'chordal_graph_cliques', 'chordal_graph_treewidth', 'NetworkXTreewidthBoundExceeded'] class NetworkXTreewidthBoundExceeded(nx.NetworkXException): """Exception raised when a treewidth bound has been provided and it has been exceeded""" def is_chordal(G): """Checks whether G is a chordal graph. A graph is chordal if every cycle of length at least 4 has a chord (an edge joining two nodes not adjacent in the cycle). Parameters ---------- G : graph A NetworkX graph. Returns ------- chordal : bool True if G is a chordal graph and False otherwise. Raises ------ NetworkXError The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. If the input graph is an instance of one of these classes, a NetworkXError is raised. Examples -------- >>> import networkx as nx >>> e=[(1,2),(1,3),(2,3),(2,4),(3,4),(3,5),(3,6),(4,5),(4,6),(5,6)] >>> G=nx.Graph(e) >>> nx.is_chordal(G) True Notes ----- The routine tries to go through every node following maximum cardinality search. It returns False when it finds that the separator for any node is not a clique. Based on the algorithms in [1]_. References ---------- .. [1] R. E. Tarjan and M. Yannakakis, Simple linear-time algorithms to test chordality of graphs, test acyclicity of hypergraphs, and selectively reduce acyclic hypergraphs, SIAM J. Comput., 13 (1984), pp. 566–579. """ if G.is_directed(): raise nx.NetworkXError('Directed graphs not supported') if G.is_multigraph(): raise nx.NetworkXError('Multiply connected graphs not supported.') if len(_find_chordality_breaker(G))==0: return True else: return False def find_induced_nodes(G,s,t,treewidth_bound=sys.maxsize): """Returns the set of induced nodes in the path from s to t. Parameters ---------- G : graph A chordal NetworkX graph s : node Source node to look for induced nodes t : node Destination node to look for induced nodes treewith_bound: float Maximum treewidth acceptable for the graph H. The search for induced nodes will end as soon as the treewidth_bound is exceeded. Returns ------- I : Set of nodes The set of induced nodes in the path from s to t in G Raises ------ NetworkXError The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. If the input graph is an instance of one of these classes, a NetworkXError is raised. The algorithm can only be applied to chordal graphs. If the input graph is found to be non-chordal, a NetworkXError is raised. Examples -------- >>> import networkx as nx >>> G=nx.Graph() >>> G = nx.generators.classic.path_graph(10) >>> I = nx.find_induced_nodes(G,1,9,2) >>> list(I) [1, 2, 3, 4, 5, 6, 7, 8, 9] Notes ----- G must be a chordal graph and (s,t) an edge that is not in G. If a treewidth_bound is provided, the search for induced nodes will end as soon as the treewidth_bound is exceeded. The algorithm is inspired by Algorithm 4 in [1]_. A formal definition of induced node can also be found on that reference. References ---------- .. [1] Learning Bounded Treewidth Bayesian Networks. Gal Elidan, Stephen Gould; JMLR, 9(Dec):2699--2731, 2008. 
http://jmlr.csail.mit.edu/papers/volume9/elidan08a/elidan08a.pdf """ if not is_chordal(G): raise nx.NetworkXError("Input graph is not chordal.") H = nx.Graph(G) H.add_edge(s,t) I = set() triplet = _find_chordality_breaker(H,s,treewidth_bound) while triplet: (u,v,w) = triplet I.update(triplet) for n in triplet: if n!=s: H.add_edge(s,n) triplet = _find_chordality_breaker(H,s,treewidth_bound) if I: # Add t and the second node in the induced path from s to t. I.add(t) for u in G[s]: if len(I & set(G[u]))==2: I.add(u) break return I def chordal_graph_cliques(G): """Returns the set of maximal cliques of a chordal graph. The algorithm breaks the graph in connected components and performs a maximum cardinality search in each component to get the cliques. Parameters ---------- G : graph A NetworkX graph Returns ------- cliques : A set containing the maximal cliques in G. Raises ------ NetworkXError The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. If the input graph is an instance of one of these classes, a NetworkXError is raised. The algorithm can only be applied to chordal graphs. If the input graph is found to be non-chordal, a NetworkXError is raised. Examples -------- >>> import networkx as nx >>> e= [(1,2),(1,3),(2,3),(2,4),(3,4),(3,5),(3,6),(4,5),(4,6),(5,6),(7,8)] >>> G = nx.Graph(e) >>> G.add_node(9) >>> setlist = nx.chordal_graph_cliques(G) """ if not is_chordal(G): raise nx.NetworkXError("Input graph is not chordal.") cliques = set() for C in nx.connected.connected_component_subgraphs(G): cliques |= _connected_chordal_graph_cliques(C) return cliques def chordal_graph_treewidth(G): """Returns the treewidth of the chordal graph G. Parameters ---------- G : graph A NetworkX graph Returns ------- treewidth : int The size of the largest clique in the graph minus one. Raises ------ NetworkXError The algorithm does not support DiGraph, MultiGraph and MultiDiGraph. If the input graph is an instance of one of these classes, a NetworkXError is raised. The algorithm can only be applied to chordal graphs. If the input graph is found to be non-chordal, a NetworkXError is raised. Examples -------- >>> import networkx as nx >>> e = [(1,2),(1,3),(2,3),(2,4),(3,4),(3,5),(3,6),(4,5),(4,6),(5,6),(7,8)] >>> G = nx.Graph(e) >>> G.add_node(9) >>> nx.chordal_graph_treewidth(G) 3 References ---------- .. [1] http://en.wikipedia.org/wiki/Tree_decomposition#Treewidth """ if not is_chordal(G): raise nx.NetworkXError("Input graph is not chordal.") max_clique = -1 for clique in nx.chordal_graph_cliques(G): max_clique = max(max_clique,len(clique)) return max_clique - 1 def _is_complete_graph(G): """Returns True if G is a complete graph.""" if G.number_of_selfloops()>0: raise nx.NetworkXError("Self loop found in _is_complete_graph()") n = G.number_of_nodes() if n < 2: return True e = G.number_of_edges() max_edges = ((n * (n-1))/2) return e == max_edges def _find_missing_edge(G): """ Given a non-complete graph G, returns a missing edge.""" nodes=set(G) for u in G: missing=nodes-set(list(G[u].keys())+[u]) if missing: return (u,missing.pop()) def _max_cardinality_node(G,choices,wanna_connect): """Returns a the node in choices that has more connections in G to nodes in wanna_connect. 
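    Ties are broken arbitrarily: the first node encountered with the maximal
    count is kept.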
""" # max_number = None max_number = -1 for x in choices: number=len([y for y in G[x] if y in wanna_connect]) if number > max_number: max_number = number max_cardinality_node = x return max_cardinality_node def _find_chordality_breaker(G,s=None,treewidth_bound=sys.maxsize): """ Given a graph G, starts a max cardinality search (starting from s if s is given and from a random node otherwise) trying to find a non-chordal cycle. If it does find one, it returns (u,v,w) where u,v,w are the three nodes that together with s are involved in the cycle. """ unnumbered = set(G) if s is None: s = random.choice(list(unnumbered)) unnumbered.remove(s) numbered = set([s]) # current_treewidth = None current_treewidth = -1 while unnumbered:# and current_treewidth <= treewidth_bound: v = _max_cardinality_node(G,unnumbered,numbered) unnumbered.remove(v) numbered.add(v) clique_wanna_be = set(G[v]) & numbered sg = G.subgraph(clique_wanna_be) if _is_complete_graph(sg): # The graph seems to be chordal by now. We update the treewidth current_treewidth = max(current_treewidth,len(clique_wanna_be)) if current_treewidth > treewidth_bound: raise nx.NetworkXTreewidthBoundExceeded(\ "treewidth_bound exceeded: %s"%current_treewidth) else: # sg is not a clique, # look for an edge that is not included in sg (u,w) = _find_missing_edge(sg) return (u,v,w) return () def _connected_chordal_graph_cliques(G): """Return the set of maximal cliques of a connected chordal graph.""" if G.number_of_nodes() == 1: x = frozenset(G.nodes()) return set([x]) else: cliques = set() unnumbered = set(G.nodes()) v = random.choice(list(unnumbered)) unnumbered.remove(v) numbered = set([v]) clique_wanna_be = set([v]) while unnumbered: v = _max_cardinality_node(G,unnumbered,numbered) unnumbered.remove(v) numbered.add(v) new_clique_wanna_be = set(G.neighbors(v)) & numbered sg = G.subgraph(clique_wanna_be) if _is_complete_graph(sg): new_clique_wanna_be.add(v) if not new_clique_wanna_be >= clique_wanna_be: cliques.add(frozenset(clique_wanna_be)) clique_wanna_be = new_clique_wanna_be else: raise nx.NetworkXError("Input graph is not chordal.") cliques.add(frozenset(clique_wanna_be)) return cliques networkx-1.8.1/networkx/algorithms/chordal/__init__.py0000664000175000017500000000007012177456333023051 0ustar aricaric00000000000000from networkx.algorithms.chordal.chordal_alg import * networkx-1.8.1/networkx/algorithms/chordal/tests/0000775000175000017500000000000012177457361022107 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/chordal/tests/test_chordal.py0000664000175000017500000000451112177456333025133 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestMCS: def setUp(self): # simple graph connected_chordal_G=nx.Graph() connected_chordal_G.add_edges_from([(1,2),(1,3),(2,3),(2,4),(3,4), (3,5),(3,6),(4,5),(4,6),(5,6)]) self.connected_chordal_G=connected_chordal_G chordal_G = nx.Graph() chordal_G.add_edges_from([(1,2),(1,3),(2,3),(2,4),(3,4), (3,5),(3,6),(4,5),(4,6),(5,6),(7,8)]) chordal_G.add_node(9) self.chordal_G=chordal_G non_chordal_G = nx.Graph() non_chordal_G.add_edges_from([(1,2),(1,3),(2,4),(2,5),(3,4),(3,5)]) self.non_chordal_G = non_chordal_G def test_is_chordal(self): assert_false(nx.is_chordal(self.non_chordal_G)) assert_true(nx.is_chordal(self.chordal_G)) assert_true(nx.is_chordal(self.connected_chordal_G)) assert_true(nx.is_chordal(nx.complete_graph(3))) assert_true(nx.is_chordal(nx.cycle_graph(3))) assert_false(nx.is_chordal(nx.cycle_graph(5))) def 
test_induced_nodes(self): G = nx.generators.classic.path_graph(10) I = nx.find_induced_nodes(G,1,9,2) assert_equal(I,set([1,2,3,4,5,6,7,8,9])) assert_raises(nx.NetworkXTreewidthBoundExceeded, nx.find_induced_nodes,G,1,9,1) I = nx.find_induced_nodes(self.chordal_G,1,6) assert_equal(I,set([1,2,4,6])) assert_raises(nx.NetworkXError, nx.find_induced_nodes,self.non_chordal_G,1,5) def test_chordal_find_cliques(self): cliques = set([frozenset([9]),frozenset([7,8]),frozenset([1,2,3]), frozenset([2,3,4]),frozenset([3,4,5,6])]) assert_equal(nx.chordal_graph_cliques(self.chordal_G),cliques) def test_chordal_find_cliques_path(self): G = nx.path_graph(10) cliqueset = nx.chordal_graph_cliques(G) for (u,v) in G.edges_iter(): assert_true(frozenset([u,v]) in cliqueset or frozenset([v,u]) in cliqueset) def test_chordal_find_cliquesCC(self): cliques = set([frozenset([1,2,3]),frozenset([2,3,4]), frozenset([3,4,5,6])]) assert_equal(nx.chordal_graph_cliques(self.connected_chordal_G),cliques) networkx-1.8.1/networkx/algorithms/centrality/0000775000175000017500000000000012177457361021507 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/centrality/current_flow_betweenness.py0000664000175000017500000003216512177456333027201 0ustar aricaric00000000000000""" Current-flow betweenness centrality measures. """ # Copyright (C) 2010-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import random import networkx as nx from networkx.algorithms.centrality.flow_matrix import * __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['current_flow_betweenness_centrality', 'approximate_current_flow_betweenness_centrality', 'edge_current_flow_betweenness_centrality'] def approximate_current_flow_betweenness_centrality(G, normalized=True, weight='weight', dtype=float, solver='full', epsilon=0.5, kmax=10000): r"""Compute the approximate current-flow betweenness centrality for nodes. Approximates the current-flow betweenness centrality within absolute error of epsilon with high probability [1]_. Parameters ---------- G : graph A NetworkX graph normalized : bool, optional (default=True) If True the betweenness values are normalized by 2/[(n-1)(n-2)] where n is the number of nodes in G. weight : string or None, optional (default='weight') Key for edge data used as the edge weight. If None, then use 1 as each edge weight. dtype: data type (float) Default data type for internal matrices. Set to np.float32 for lower memory consumption. solver: string (default='lu') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). epsilon: float Absolute error tolerance. kmax: int Maximum number of sample node pairs to use for approximation. Returns ------- nodes : dictionary Dictionary of nodes with betweenness centrality as the value. See Also -------- current_flow_betweenness_centrality Notes ----- The running time is `O((1/\epsilon^2)m{\sqrt k} \log n)` and the space required is `O(m)` for n nodes and m edges. If the edges have a 'weight' attribute they will be used as weights in this algorithm. Unspecified weights are set to 1. References ---------- .. [1] Centrality Measures Based on Current Flow. Ulrik Brandes and Daniel Fleischer, Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). LNCS 3404, pp. 533-544. Springer-Verlag, 2005. 
http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf """ from networkx.utils import reverse_cuthill_mckee_ordering try: import numpy as np except ImportError: raise ImportError('current_flow_betweenness_centrality requires NumPy ', 'http://scipy.org/') try: from scipy import sparse from scipy.sparse import linalg except ImportError: raise ImportError('current_flow_betweenness_centrality requires SciPy ', 'http://scipy.org/') if G.is_directed(): raise nx.NetworkXError('current_flow_betweenness_centrality() ', 'not defined for digraphs.') if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") solvername={"full" :FullInverseLaplacian, "lu": SuperLUInverseLaplacian, "cg": CGInverseLaplacian} n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G,dict(zip(ordering,range(n)))) L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight, dtype=dtype, format='csc') C = solvername[solver](L, dtype=dtype) # initialize solver betweenness = dict.fromkeys(H,0.0) nb = (n-1.0)*(n-2.0) # normalization factor cstar = n*(n-1)/nb l = 1 # parameter in approximation, adjustable k = l*int(np.ceil((cstar/epsilon)**2*np.log(n))) if k > kmax: raise nx.NetworkXError('Number random pairs k>kmax (%d>%d) '%(k,kmax), 'Increase kmax or epsilon') cstar2k = cstar/(2*k) for i in range(k): s,t = random.sample(range(n),2) b = np.zeros(n, dtype=dtype) b[s] = 1 b[t] = -1 p = C.solve(b) for v in H: if v==s or v==t: continue for nbr in H[v]: w = H[v][nbr].get(weight,1.0) betweenness[v] += w*np.abs(p[v]-p[nbr])*cstar2k if normalized: factor = 1.0 else: factor = nb/2.0 # remap to original node names and "unnormalize" if required return dict((ordering[k],float(v*factor)) for k,v in betweenness.items()) def current_flow_betweenness_centrality(G, normalized=True, weight='weight', dtype=float, solver='full'): r"""Compute current-flow betweenness centrality for nodes. Current-flow betweenness centrality uses an electrical current model for information spreading in contrast to betweenness centrality which uses shortest paths. Current-flow betweenness centrality is also known as random-walk betweenness centrality [2]_. Parameters ---------- G : graph A NetworkX graph normalized : bool, optional (default=True) If True the betweenness values are normalized by 2/[(n-1)(n-2)] where n is the number of nodes in G. weight : string or None, optional (default='weight') Key for edge data used as the edge weight. If None, then use 1 as each edge weight. dtype: data type (float) Default data type for internal matrices. Set to np.float32 for lower memory consumption. solver: string (default='lu') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). Returns ------- nodes : dictionary Dictionary of nodes with betweenness centrality as the value. See Also -------- approximate_current_flow_betweenness_centrality betweenness_centrality edge_betweenness_centrality edge_current_flow_betweenness_centrality Notes ----- Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)` time [1]_, where `I(n-1)` is the time needed to compute the inverse Laplacian. For a full matrix this is `O(n^3)` but using sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the Laplacian matrix condition number. 
The space required is `O(nw) where `w` is the width of the sparse Laplacian matrix. Worse case is `w=n` for `O(n^2)`. If the edges have a 'weight' attribute they will be used as weights in this algorithm. Unspecified weights are set to 1. References ---------- .. [1] Centrality Measures Based on Current Flow. Ulrik Brandes and Daniel Fleischer, Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). LNCS 3404, pp. 533-544. Springer-Verlag, 2005. http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf .. [2] A measure of betweenness centrality based on random walks, M. E. J. Newman, Social Networks 27, 39-54 (2005). """ from networkx.utils import reverse_cuthill_mckee_ordering try: import numpy as np except ImportError: raise ImportError('current_flow_betweenness_centrality requires NumPy ', 'http://scipy.org/') try: import scipy except ImportError: raise ImportError('current_flow_betweenness_centrality requires SciPy ', 'http://scipy.org/') if G.is_directed(): raise nx.NetworkXError('current_flow_betweenness_centrality() ', 'not defined for digraphs.') if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G,dict(zip(ordering,range(n)))) betweenness = dict.fromkeys(H,0.0) # b[v]=0 for v in H for row,(s,t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): pos = dict(zip(row.argsort()[::-1],range(n))) for i in range(n): betweenness[s] += (i-pos[i])*row[i] betweenness[t] += (n-i-1-pos[i])*row[i] if normalized: nb = (n-1.0)*(n-2.0) # normalization factor else: nb = 2.0 for i,v in enumerate(H): # map integers to nodes betweenness[v] = float((betweenness[v]-i)*2.0/nb) return dict((ordering[k],v) for k,v in betweenness.items()) def edge_current_flow_betweenness_centrality(G, normalized=True, weight='weight', dtype=float, solver='full'): """Compute current-flow betweenness centrality for edges. Current-flow betweenness centrality uses an electrical current model for information spreading in contrast to betweenness centrality which uses shortest paths. Current-flow betweenness centrality is also known as random-walk betweenness centrality [2]_. Parameters ---------- G : graph A NetworkX graph normalized : bool, optional (default=True) If True the betweenness values are normalized by 2/[(n-1)(n-2)] where n is the number of nodes in G. weight : string or None, optional (default='weight') Key for edge data used as the edge weight. If None, then use 1 as each edge weight. dtype: data type (float) Default data type for internal matrices. Set to np.float32 for lower memory consumption. solver: string (default='lu') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). Returns ------- nodes : dictionary Dictionary of edge tuples with betweenness centrality as the value. See Also -------- betweenness_centrality edge_betweenness_centrality current_flow_betweenness_centrality Notes ----- Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)` time [1]_, where `I(n-1)` is the time needed to compute the inverse Laplacian. For a full matrix this is `O(n^3)` but using sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the Laplacian matrix condition number. 
The space required is `O(nw) where `w` is the width of the sparse Laplacian matrix. Worse case is `w=n` for `O(n^2)`. If the edges have a 'weight' attribute they will be used as weights in this algorithm. Unspecified weights are set to 1. References ---------- .. [1] Centrality Measures Based on Current Flow. Ulrik Brandes and Daniel Fleischer, Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). LNCS 3404, pp. 533-544. Springer-Verlag, 2005. http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf .. [2] A measure of betweenness centrality based on random walks, M. E. J. Newman, Social Networks 27, 39-54 (2005). """ from networkx.utils import reverse_cuthill_mckee_ordering try: import numpy as np except ImportError: raise ImportError('current_flow_betweenness_centrality requires NumPy ', 'http://scipy.org/') try: import scipy except ImportError: raise ImportError('current_flow_betweenness_centrality requires SciPy ', 'http://scipy.org/') if G.is_directed(): raise nx.NetworkXError('edge_current_flow_betweenness_centrality ', 'not defined for digraphs.') if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G,dict(zip(ordering,range(n)))) betweenness=(dict.fromkeys(H.edges(),0.0)) if normalized: nb=(n-1.0)*(n-2.0) # normalization factor else: nb=2.0 for row,(e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): pos=dict(zip(row.argsort()[::-1],range(1,n+1))) for i in range(n): betweenness[e]+=(i+1-pos[i])*row[i] betweenness[e]+=(n-i-pos[i])*row[i] betweenness[e]/=nb return dict(((ordering[s],ordering[t]),float(v)) for (s,t),v in betweenness.items()) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy import scipy except: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/algorithms/centrality/betweenness_subset.py0000664000175000017500000002050612177456333025771 0ustar aricaric00000000000000""" Betweenness centrality measures for subsets of nodes. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['betweenness_centrality_subset', 'edge_betweenness_centrality_subset', 'betweenness_centrality_source'] import networkx as nx from networkx.algorithms.centrality.betweenness import\ _single_source_dijkstra_path_basic as dijkstra from networkx.algorithms.centrality.betweenness import\ _single_source_shortest_path_basic as shortest_path def betweenness_centrality_subset(G,sources,targets, normalized=False, weight=None): """Compute betweenness centrality for a subset of nodes. .. math:: c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)} where `S` is the set of sources, `T` is the set of targets, `\sigma(s, t)` is the number of shortest `(s, t)`-paths, and `\sigma(s, t|v)` is the number of those paths passing through some node `v` other than `s, t`. If `s = t`, `\sigma(s, t) = 1`, and if `v \in {s, t}`, `\sigma(s, t|v) = 0` [2]_. 
Parameters ---------- G : graph sources: list of nodes Nodes to use as sources for shortest paths in betweenness targets: list of nodes Nodes to use as targets for shortest paths in betweenness normalized : bool, optional If True the betweenness values are normalized by `2/((n-1)(n-2))` for graphs, and `1/((n-1)(n-2))` for directed graphs where `n` is the number of nodes in G. weight : None or string, optional If None, all edge weights are considered equal. Otherwise holds the name of the edge attribute used as weight. Returns ------- nodes : dictionary Dictionary of nodes with betweenness centrality as the value. See Also -------- edge_betweenness_centrality load_centrality Notes ----- The basic algorithm is from [1]_. For weighted graphs the edge weights must be greater than zero. Zero edge weights can produce an infinite number of equal length paths between pairs of nodes. The normalization might seem a little strange but it is the same as in betweenness_centrality() and is designed to make betweenness_centrality(G) be the same as betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()). References ---------- .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality. Journal of Mathematical Sociology 25(2):163-177, 2001. http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness Centrality and their Generic Computation. Social Networks 30(2):136-145, 2008. http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf """ b=dict.fromkeys(G,0.0) # b[v]=0 for v in G for s in sources: # single source shortest paths if weight is None: # use BFS S,P,sigma=shortest_path(G,s) else: # use Dijkstra's algorithm S,P,sigma=dijkstra(G,s,weight) b=_accumulate_subset(b,S,P,sigma,s,targets) b=_rescale(b,len(G),normalized=normalized,directed=G.is_directed()) return b def edge_betweenness_centrality_subset(G,sources,targets, normalized=False, weight=None): """Compute betweenness centrality for edges for a subset of nodes. .. math:: c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)} where `S` is the set of sources, `T` is the set of targets, `\sigma(s, t)` is the number of shortest `(s, t)`-paths, and `\sigma(s, t|e)` is the number of those paths passing through edge `e` [2]_. Parameters ---------- G : graph A networkx graph sources: list of nodes Nodes to use as sources for shortest paths in betweenness targets: list of nodes Nodes to use as targets for shortest paths in betweenness normalized : bool, optional If True the betweenness values are normalized by `2/(n(n-1))` for graphs, and `1/(n(n-1))` for directed graphs where `n` is the number of nodes in G. weight : None or string, optional If None, all edge weights are considered equal. Otherwise holds the name of the edge attribute used as weight. Returns ------- edges : dictionary Dictionary of edges with Betweenness centrality as the value. See Also -------- betweenness_centrality edge_load Notes ----- The basic algorithm is from [1]_. For weighted graphs the edge weights must be greater than zero. Zero edge weights can produce an infinite number of equal length paths between pairs of nodes. The normalization might seem a little strange but it is the same as in edge_betweenness_centrality() and is designed to make edge_betweenness_centrality(G) be the same as edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()). References ---------- .. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality. 
Journal of Mathematical Sociology 25(2):163-177, 2001. http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness Centrality and their Generic Computation. Social Networks 30(2):136-145, 2008. http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf """ b=dict.fromkeys(G,0.0) # b[v]=0 for v in G b.update(dict.fromkeys(G.edges(),0.0)) # b[e] for e in G.edges() for s in sources: # single source shortest paths if weight is None: # use BFS S,P,sigma=shortest_path(G,s) else: # use Dijkstra's algorithm S,P,sigma=dijkstra(G,s,weight) b=_accumulate_edges_subset(b,S,P,sigma,s,targets) for n in G: # remove nodes to only return edges del b[n] b=_rescale_e(b,len(G),normalized=normalized,directed=G.is_directed()) return b # obsolete name def betweenness_centrality_source(G,normalized=True,weight=None,sources=None): if sources is None: sources=G.nodes() targets=G.nodes() return betweenness_centrality_subset(G,sources,targets,normalized,weight) def _accumulate_subset(betweenness,S,P,sigma,s,targets): delta=dict.fromkeys(S,0) target_set=set(targets) while S: w=S.pop() for v in P[w]: if w in target_set: delta[v]+=(sigma[v]/sigma[w])*(1.0+delta[w]) else: delta[v]+=delta[w]/len(P[w]) if w != s: betweenness[w]+=delta[w] return betweenness def _accumulate_edges_subset(betweenness,S,P,sigma,s,targets): delta=dict.fromkeys(S,0) target_set=set(targets) while S: w=S.pop() for v in P[w]: if w in target_set: c=(sigma[v]/sigma[w])*(1.0+delta[w]) else: c=delta[w]/len(P[w]) if (v,w) not in betweenness: betweenness[(w,v)]+=c else: betweenness[(v,w)]+=c delta[v]+=c if w != s: betweenness[w]+=delta[w] return betweenness def _rescale(betweenness,n,normalized,directed=False): if normalized is True: if n <=2: scale=None # no normalization b=0 for all nodes else: scale=1.0/((n-1)*(n-2)) else: # rescale by 2 for undirected graphs if not directed: scale=1.0/2.0 else: scale=None if scale is not None: for v in betweenness: betweenness[v] *= scale return betweenness def _rescale_e(betweenness,n,normalized,directed=False): if normalized is True: if n <=1: scale=None # no normalization b=0 for all nodes else: scale=1.0/(n*(n-1)) else: # rescale by 2 for undirected graphs if not directed: scale=1.0/2.0 else: scale=None if scale is not None: for v in betweenness: betweenness[v] *= scale return betweenness networkx-1.8.1/networkx/algorithms/centrality/communicability_alg.py0000664000175000017500000003376112177456333026104 0ustar aricaric00000000000000""" Communicability and centrality measures. """ # Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from networkx.utils import * __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Franck Kalala (franckkalala@yahoo.fr']) __all__ = ['communicability_centrality_exp', 'communicability_centrality', 'communicability_betweenness_centrality', 'communicability', 'communicability_exp', 'estrada_index', ] @require('scipy') @not_implemented_for('directed') @not_implemented_for('multigraph') def communicability_centrality_exp(G): r"""Return the communicability centrality for each node of G Communicability centrality, also called subgraph centrality, of a node `n` is the sum of closed walks of all lengths starting and ending at node `n`. Parameters ---------- G: graph Returns ------- nodes:dictionary Dictionary of nodes with communicability centrality as the value. Raises ------ NetworkXError If the graph is not undirected and simple. 
See Also -------- communicability: Communicability between all pairs of nodes in G. communicability_centrality: Communicability centrality for each node of G. Notes ----- This version of the algorithm exponentiates the adjacency matrix. The communicability centrality of a node `u` in G can be found using the matrix exponential of the adjacency matrix of G [1]_ [2]_, .. math:: SC(u)=(e^A)_{uu} . References ---------- .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez, "Subgraph centrality in complex networks", Physical Review E 71, 056103 (2005). http://arxiv.org/abs/cond-mat/0504730 .. [2] Ernesto Estrada, Naomichi Hatano, "Communicability in complex networks", Phys. Rev. E 77, 036111 (2008). http://arxiv.org/abs/0707.0756 Examples -------- >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) >>> sc = nx.communicability_centrality_exp(G) """ # alternative implementation that calculates the matrix exponential import scipy.linalg nodelist = G.nodes() # ordering of nodes in matrix A = nx.to_numpy_matrix(G,nodelist) # convert to 0-1 matrix A[A!=0.0] = 1 expA = scipy.linalg.expm(A) # convert diagonal to dictionary keyed by node sc = dict(zip(nodelist,map(float,expA.diagonal()))) return sc @require('numpy') @not_implemented_for('directed') @not_implemented_for('multigraph') def communicability_centrality(G): r"""Return communicability centrality for each node in G. Communicability centrality, also called subgraph centrality, of a node `n` is the sum of closed walks of all lengths starting and ending at node `n`. Parameters ---------- G: graph Returns ------- nodes: dictionary Dictionary of nodes with communicability centrality as the value. Raises ------ NetworkXError If the graph is not undirected and simple. See Also -------- communicability: Communicability between all pairs of nodes in G. communicability_centrality: Communicability centrality for each node of G. Notes ----- This version of the algorithm computes eigenvalues and eigenvectors of the adjacency matrix. Communicability centrality of a node `u` in G can be found using a spectral decomposition of the adjacency matrix [1]_ [2]_, .. math:: SC(u)=\sum_{j=1}^{N}(v_{j}^{u})^2 e^{\lambda_{j}}, where `v_j` is an eigenvector of the adjacency matrix `A` of G corresponding corresponding to the eigenvalue `\lambda_j`. Examples -------- >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) >>> sc = nx.communicability_centrality(G) References ---------- .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez, "Subgraph centrality in complex networks", Physical Review E 71, 056103 (2005). http://arxiv.org/abs/cond-mat/0504730 .. [2] Ernesto Estrada, Naomichi Hatano, "Communicability in complex networks", Phys. Rev. E 77, 036111 (2008). http://arxiv.org/abs/0707.0756 """ import numpy import numpy.linalg nodelist = G.nodes() # ordering of nodes in matrix A = nx.to_numpy_matrix(G,nodelist) # convert to 0-1 matrix A[A!=0.0] = 1 w,v = numpy.linalg.eigh(A) vsquare = numpy.array(v)**2 expw = numpy.exp(w) xg = numpy.dot(vsquare,expw) # convert vector dictionary keyed by node sc = dict(zip(nodelist,map(float,xg))) return sc @require('scipy') @not_implemented_for('directed') @not_implemented_for('multigraph') def communicability_betweenness_centrality(G, normalized=True): r"""Return communicability betweenness for all pairs of nodes in G. Communicability betweenness measure makes use of the number of walks connecting every pair of nodes as the basis of a betweenness centrality measure. 
Parameters ---------- G: graph Returns ------- nodes:dictionary Dictionary of nodes with communicability betweenness as the value. Raises ------ NetworkXError If the graph is not undirected and simple. See Also -------- communicability: Communicability between all pairs of nodes in G. communicability_centrality: Communicability centrality for each node of G using matrix exponential. communicability_centrality_exp: Communicability centrality for each node in G using spectral decomposition. Notes ----- Let `G=(V,E)` be a simple undirected graph with `n` nodes and `m` edges, and `A` denote the adjacency matrix of `G`. Let `G(r)=(V,E(r))` be the graph resulting from removing all edges connected to node `r` but not the node itself. The adjacency matrix for `G(r)` is `A+E(r)`, where `E(r)` has nonzeros only in row and column `r`. The communicability betweenness of a node `r` is [1]_ .. math:: \omega_{r} = \frac{1}{C}\sum_{p}\sum_{q}\frac{G_{prq}}{G_{pq}}, p\neq q, q\neq r, where `G_{prq}=(e^{A}_{pq} - (e^{A+E(r)})_{pq}` is the number of walks involving node r, `G_{pq}=(e^{A})_{pq}` is the number of closed walks starting at node `p` and ending at node `q`, and `C=(n-1)^{2}-(n-1)` is a normalization factor equal to the number of terms in the sum. The resulting `\omega_{r}` takes values between zero and one. The lower bound cannot be attained for a connected graph, and the upper bound is attained in the star graph. References ---------- .. [1] Ernesto Estrada, Desmond J. Higham, Naomichi Hatano, "Communicability Betweenness in Complex Networks" Physica A 388 (2009) 764-774. http://arxiv.org/abs/0905.4102 Examples -------- >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) >>> cbc = nx.communicability_betweenness_centrality(G) """ import scipy import scipy.linalg nodelist = G.nodes() # ordering of nodes in matrix n = len(nodelist) A = nx.to_numpy_matrix(G,nodelist) # convert to 0-1 matrix A[A!=0.0] = 1 expA = scipy.linalg.expm(A) mapping = dict(zip(nodelist,range(n))) sc = {} for v in G: # remove row and col of node v i = mapping[v] row = A[i,:].copy() col = A[:,i].copy() A[i,:] = 0 A[:,i] = 0 B = (expA - scipy.linalg.expm(A)) / expA # sum with row/col of node v and diag set to zero B[i,:] = 0 B[:,i] = 0 B -= scipy.diag(scipy.diag(B)) sc[v] = float(B.sum()) # put row and col back A[i,:] = row A[:,i] = col # rescaling sc = _rescale(sc,normalized=normalized) return sc def _rescale(sc,normalized): # helper to rescale betweenness centrality if normalized is True: order=len(sc) if order <=2: scale=None else: scale=1.0/((order-1.0)**2-(order-1.0)) if scale is not None: for v in sc: sc[v] *= scale return sc @require('numpy','scipy') @not_implemented_for('directed') @not_implemented_for('multigraph') def communicability(G): r"""Return communicability between all pairs of nodes in G. The communicability between pairs of nodes in G is the sum of closed walks of different lengths starting at node u and ending at node v. Parameters ---------- G: graph Returns ------- comm: dictionary of dictionaries Dictionary of dictionaries keyed by nodes with communicability as the value. Raises ------ NetworkXError If the graph is not undirected and simple. See Also -------- communicability_centrality_exp: Communicability centrality for each node of G using matrix exponential. communicability_centrality: Communicability centrality for each node in G using spectral decomposition. communicability: Communicability between pairs of nodes in G. 
Notes ----- This algorithm uses a spectral decomposition of the adjacency matrix. Let G=(V,E) be a simple undirected graph. Using the connection between the powers of the adjacency matrix and the number of walks in the graph, the communicability between nodes `u` and `v` based on the graph spectrum is [1]_ .. math:: C(u,v)=\sum_{j=1}^{n}\phi_{j}(u)\phi_{j}(v)e^{\lambda_{j}}, where `\phi_{j}(u)` is the `u\rm{th}` element of the `j\rm{th}` orthonormal eigenvector of the adjacency matrix associated with the eigenvalue `\lambda_{j}`. References ---------- .. [1] Ernesto Estrada, Naomichi Hatano, "Communicability in complex networks", Phys. Rev. E 77, 036111 (2008). http://arxiv.org/abs/0707.0756 Examples -------- >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) >>> c = nx.communicability(G) """ import numpy import scipy.linalg nodelist = G.nodes() # ordering of nodes in matrix A = nx.to_numpy_matrix(G,nodelist) # convert to 0-1 matrix A[A!=0.0] = 1 w,vec = numpy.linalg.eigh(A) expw = numpy.exp(w) mapping = dict(zip(nodelist,range(len(nodelist)))) sc={} # computing communicabilities for u in G: sc[u]={} for v in G: s = 0 p = mapping[u] q = mapping[v] for j in range(len(nodelist)): s += vec[:,j][p,0]*vec[:,j][q,0]*expw[j] sc[u][v] = float(s) return sc @require('scipy') @not_implemented_for('directed') @not_implemented_for('multigraph') def communicability_exp(G): r"""Return communicability between all pairs of nodes in G. Communicability between pair of node (u,v) of node in G is the sum of closed walks of different lengths starting at node u and ending at node v. Parameters ---------- G: graph Returns ------- comm: dictionary of dictionaries Dictionary of dictionaries keyed by nodes with communicability as the value. Raises ------ NetworkXError If the graph is not undirected and simple. See Also -------- communicability_centrality_exp: Communicability centrality for each node of G using matrix exponential. communicability_centrality: Communicability centrality for each node in G using spectral decomposition. communicability_exp: Communicability between all pairs of nodes in G using spectral decomposition. Notes ----- This algorithm uses matrix exponentiation of the adjacency matrix. Let G=(V,E) be a simple undirected graph. Using the connection between the powers of the adjacency matrix and the number of walks in the graph, the communicability between nodes u and v is [1]_, .. math:: C(u,v) = (e^A)_{uv}, where `A` is the adjacency matrix of G. References ---------- .. [1] Ernesto Estrada, Naomichi Hatano, "Communicability in complex networks", Phys. Rev. E 77, 036111 (2008). http://arxiv.org/abs/0707.0756 Examples -------- >>> G = nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) >>> c = nx.communicability_exp(G) """ import scipy.linalg nodelist = G.nodes() # ordering of nodes in matrix A = nx.to_numpy_matrix(G,nodelist) # convert to 0-1 matrix A[A!=0.0] = 1 # communicability matrix expA = scipy.linalg.expm(A) mapping = dict(zip(nodelist,range(len(nodelist)))) sc = {} for u in G: sc[u]={} for v in G: sc[u][v] = float(expA[mapping[u],mapping[v]]) return sc @require('numpy') def estrada_index(G): r"""Return the Estrada index of a the graph G. Parameters ---------- G: graph Returns ------- estrada index: float Raises ------ NetworkXError If the graph is not undirected and simple. 
See also -------- estrada_index_exp Notes ----- Let `G=(V,E)` be a simple undirected graph with `n` nodes and let `\lambda_{1}\leq\lambda_{2}\leq\cdots\lambda_{n}` be a non-increasing ordering of the eigenvalues of its adjacency matrix `A`. The Estrada index is .. math:: EE(G)=\sum_{j=1}^n e^{\lambda _j}. References ---------- .. [1] E. Estrada, Characterization of 3D molecular structure, Chem. Phys. Lett. 319, 713 (2000). Examples -------- >>> G=nx.Graph([(0,1),(1,2),(1,5),(5,4),(2,4),(2,3),(4,3),(3,6)]) >>> ei=nx.estrada_index(G) """ return sum(communicability_centrality(G).values()) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/algorithms/centrality/flow_matrix.py0000664000175000017500000001024012177456333024407 0ustar aricaric00000000000000# Helpers for current-flow betweenness and current-flow closness # Lazy computations for inverse Laplacian and flow-matrix rows. import networkx as nx def flow_matrix_row(G, weight='weight', dtype=float, solver='lu'): # Generate a row of the current-flow matrix import numpy as np from scipy import sparse from scipy.sparse import linalg solvername={"full" :FullInverseLaplacian, "lu": SuperLUInverseLaplacian, "cg": CGInverseLaplacian} n = G.number_of_nodes() L = laplacian_sparse_matrix(G, nodelist=range(n), weight=weight, dtype=dtype, format='csc') C = solvername[solver](L, dtype=dtype) # initialize solver w = C.w # w is the Laplacian matrix width # row-by-row flow matrix for u,v,d in G.edges_iter(data=True): B = np.zeros(w, dtype=dtype) c = d.get(weight,1.0) B[u%w] = c B[v%w] = -c # get only the rows needed in the inverse laplacian # and multiply to get the flow matrix row row = np.dot(B, C.get_rows(u,v)) yield row,(u,v) # Class to compute the inverse laplacian only for specified rows # Allows computation of the current-flow matrix without storing entire # inverse laplacian matrix class InverseLaplacian(object): def __init__(self, L, width=None, dtype=None): global np import numpy as np (n,n) = L.shape self.dtype = dtype self.n = n if width is None: self.w = self.width(L) else: self.w = width self.C = np.zeros((self.w,n), dtype=dtype) self.L1 = L[1:,1:] self.init_solver(L) def init_solver(self,L): pass def solve(self,r): raise("Implement solver") def solve_inverse(self,r): raise("Implement solver") def get_rows(self, r1, r2): for r in range(r1, r2+1): self.C[r%self.w, 1:] = self.solve_inverse(r) return self.C def get_row(self, r): self.C[r%self.w, 1:] = self.solve_inverse(r) return self.C[r%self.w] def width(self,L): m=0 for i,row in enumerate(L): w=0 x,y = np.nonzero(row) if len(y) > 0: v = y-i w=v.max()-v.min()+1 m = max(w,m) return m class FullInverseLaplacian(InverseLaplacian): def init_solver(self,L): self.IL = np.zeros(L.shape, dtype=self.dtype) self.IL[1:,1:] = np.linalg.inv(self.L1.todense()) def solve(self,rhs): s = np.zeros(rhs.shape, dtype=self.dtype) s = np.dot(self.IL,rhs) return s def solve_inverse(self,r): return self.IL[r,1:] class SuperLUInverseLaplacian(InverseLaplacian): def init_solver(self,L): from scipy.sparse import linalg self.lusolve = linalg.factorized(self.L1.tocsc()) def solve_inverse(self,r): rhs = np.zeros(self.n, dtype=self.dtype) rhs[r]=1 return self.lusolve(rhs[1:]) def solve(self,rhs): s = np.zeros(rhs.shape, dtype=self.dtype) s[1:]=self.lusolve(rhs[1:]) return s class CGInverseLaplacian(InverseLaplacian): def init_solver(self,L): global 
linalg from scipy.sparse import linalg ilu= linalg.spilu(self.L1.tocsc()) n=self.n-1 self.M = linalg.LinearOperator(shape=(n,n), matvec=ilu.solve) def solve(self,rhs): s = np.zeros(rhs.shape, dtype=self.dtype) s[1:]=linalg.cg(self.L1, rhs[1:], M=self.M)[0] return s def solve_inverse(self,r): rhs = np.zeros(self.n, self.dtype) rhs[r] = 1 return linalg.cg(self.L1, rhs[1:], M=self.M)[0] # graph laplacian, sparse version, will move to linalg/laplacianmatrix.py def laplacian_sparse_matrix(G, nodelist=None, weight='weight', dtype=None, format='csr'): import numpy as np import scipy.sparse A = nx.to_scipy_sparse_matrix(G, nodelist=nodelist, weight=weight, dtype=dtype, format=format) (n,n) = A.shape data = np.asarray(A.sum(axis=1).T) D = scipy.sparse.spdiags(data,0,n,n, format=format) return D - A networkx-1.8.1/networkx/algorithms/centrality/katz.py0000664000175000017500000002153112177456333023032 0ustar aricaric00000000000000""" Katz centrality. """ # Copyright (C) 2004-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from networkx.utils import * __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Sasha Gutfraind (ag362@cornell.edu)', 'Vincent Gauthier (vgauthier@luxbulb.org)']) __all__ = ['katz_centrality', 'katz_centrality_numpy'] @not_implemented_for('multigraph') def katz_centrality(G, alpha=0.1, beta=1.0, max_iter=1000, tol=1.0e-6, nstart=None, normalized=True): r"""Compute the Katz centrality for the nodes of the graph G. Katz centrality is related to eigenvalue centrality and PageRank. The Katz centrality for node `i` is .. math:: x_i = \alpha \sum_{j} A_{ij} x_j + \beta, where `A` is the adjacency matrix of the graph G with eigenvalues `\lambda`. The parameter `\beta` controls the initial centrality and .. math:: \alpha < \frac{1}{\lambda_{max}}. Katz centrality computes the relative influence of a node within a network by measuring the number of the immediate neighbors (first degree nodes) and also all other nodes in the network that connect to the node under consideration through these immediate neighbors. Extra weight can be provided to immediate neighbors through the parameter :math:`\beta`. Connections made with distant neighbors are, however, penalized by an attenuation factor `\alpha` which should be strictly less than the inverse largest eigenvalue of the adjacency matrix in order for the Katz centrality to be computed correctly. More information is provided in [1]_ . Parameters ---------- G : graph A NetworkX graph alpha : float Attenuation factor beta : scalar or dictionary, optional (default=1.0) Weight attributed to the immediate neighborhood. If not a scalar the dictionary must have an value for every node. max_iter : integer, optional (default=1000) Maximum number of iterations in power method. tol : float, optional (default=1.0e-6) Error tolerance used to check convergence in power method iteration. nstart : dictionary, optional Starting value of Katz iteration for each node. normalized : bool, optional (default=True) If True normalize the resulting values. Returns ------- nodes : dictionary Dictionary of nodes with Katz centrality as the value. Examples -------- >>> import math >>> G = nx.path_graph(4) >>> phi = (1+math.sqrt(5))/2.0 # largest eigenvalue of adj matrix >>> centrality = nx.katz_centrality(G,1/phi-0.01) >>> for n,c in sorted(centrality.items()): ... 
print("%d %0.2f"%(n,c)) 0 0.37 1 0.60 2 0.60 3 0.37 Notes ----- This algorithm it uses the power method to find the eigenvector corresponding to the largest eigenvalue of the adjacency matrix of G. The constant alpha should be strictly less than the inverse of largest eigenvalue of the adjacency matrix for the algorithm to converge. The iteration will stop after max_iter iterations or an error tolerance of number_of_nodes(G)*tol has been reached. When `\alpha = 1/\lambda_{max}` and `\beta=1` Katz centrality is the same as eigenvector centrality. References ---------- .. [1] M. Newman, Networks: An Introduction. Oxford University Press, USA, 2010, p. 720. See Also -------- katz_centrality_numpy eigenvector_centrality eigenvector_centrality_numpy pagerank hits """ from math import sqrt if len(G)==0: return {} nnodes=G.number_of_nodes() if nstart is None: # choose starting vector with entries of 0 x=dict([(n,0) for n in G]) else: x=nstart try: b = dict.fromkeys(G,float(beta)) except (TypeError,ValueError): b = beta if set(beta) != set(G): raise nx.NetworkXError('beta dictionary ' 'must have a value for every node') # make up to max_iter iterations for i in range(max_iter): xlast=x x=dict.fromkeys(xlast, 0) # do the multiplication y = Alpha * Ax - Beta for n in x: for nbr in G[n]: x[n] += xlast[nbr] * G[n][nbr].get('weight',1) x[n] = alpha*x[n] + b[n] # check convergence err=sum([abs(x[n]-xlast[n]) for n in x]) if err < nnodes*tol: if normalized: # normalize vector try: s=1.0/sqrt(sum(v**2 for v in x.values())) # this should never be zero? except ZeroDivisionError: s=1.0 else: s = 1 for n in x: x[n]*=s return x raise nx.NetworkXError('Power iteration failed to converge in ', '%d iterations."%(i+1))') @not_implemented_for('multigraph') def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True): r"""Compute the Katz centrality for the graph G. Katz centrality is related to eigenvalue centrality and PageRank. The Katz centrality for node `i` is .. math:: x_i = \alpha \sum_{j} A_{ij} x_j + \beta, where `A` is the adjacency matrix of the graph G with eigenvalues `\lambda`. The parameter `\beta` controls the initial centrality and .. math:: \alpha < \frac{1}{\lambda_{max}}. Katz centrality computes the relative influence of a node within a network by measuring the number of the immediate neighbors (first degree nodes) and also all other nodes in the network that connect to the node under consideration through these immediate neighbors. Extra weight can be provided to immediate neighbors through the parameter :math:`\beta`. Connections made with distant neighbors are, however, penalized by an attenuation factor `\alpha` which should be strictly less than the inverse largest eigenvalue of the adjacency matrix in order for the Katz centrality to be computed correctly. More information is provided in [1]_ . Parameters ---------- G : graph A NetworkX graph alpha : float Attenuation factor beta : scalar or dictionary, optional (default=1.0) Weight attributed to the immediate neighborhood. If not a scalar the dictionary must have an value for every node. normalized : bool If True normalize the resulting values. Returns ------- nodes : dictionary Dictionary of nodes with Katz centrality as the value. Examples -------- >>> import math >>> G = nx.path_graph(4) >>> phi = (1+math.sqrt(5))/2.0 # largest eigenvalue of adj matrix >>> centrality = nx.katz_centrality_numpy(G,1/phi) >>> for n,c in sorted(centrality.items()): ... 
print("%d %0.2f"%(n,c)) 0 0.37 1 0.60 2 0.60 3 0.37 Notes ------ This algorithm uses a direct linear solver to solve the above equation. The constant alpha should be strictly less than the inverse of largest eigenvalue of the adjacency matrix for there to be a solution. When `\alpha = 1/\lambda_{max}` and `\beta=1` Katz centrality is the same as eigenvector centrality. References ---------- .. [1] M. Newman, Networks: An Introduction. Oxford University Press, USA, 2010, p. 720. See Also -------- katz_centrality eigenvector_centrality_numpy eigenvector_centrality pagerank hits """ try: import numpy as np except ImportError: raise ImportError('Requires NumPy: http://scipy.org/') if len(G)==0: return {} try: nodelist = beta.keys() if set(nodelist) != set(G): raise nx.NetworkXError('beta dictionary ' 'must have a value for every node') b = np.array(list(beta.values()),dtype=float) except AttributeError: nodelist = G.nodes() try: b = np.ones((len(nodelist),1))*float(beta) except (TypeError,ValueError): raise nx.NetworkXError('beta must be a number') A=nx.adj_matrix(G, nodelist=nodelist) n = np.array(A).shape[0] centrality = np.linalg.solve( np.eye(n,n) - (alpha * A) , b) if normalized: norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) else: norm = 1.0 centrality=dict(zip(nodelist, map(float,centrality/norm))) return centrality # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy import numpy.linalg except: raise SkipTest("numpy not available") networkx-1.8.1/networkx/algorithms/centrality/eigenvector.py0000664000175000017500000001151312177456333024372 0ustar aricaric00000000000000""" Eigenvector centrality. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Sasha Gutfraind (ag362@cornell.edu)']) __all__ = ['eigenvector_centrality', 'eigenvector_centrality_numpy'] def eigenvector_centrality(G,max_iter=100,tol=1.0e-6,nstart=None): """Compute the eigenvector centrality for the graph G. Uses the power method to find the eigenvector for the largest eigenvalue of the adjacency matrix of G. Parameters ---------- G : graph A networkx graph max_iter : interger, optional Maximum number of iterations in power method. tol : float, optional Error tolerance used to check convergence in power method iteration. nstart : dictionary, optional Starting value of eigenvector iteration for each node. Returns ------- nodes : dictionary Dictionary of nodes with eigenvector centrality as the value. Examples -------- >>> G=nx.path_graph(4) >>> centrality=nx.eigenvector_centrality(G) >>> print(['%s %0.2f'%(node,centrality[node]) for node in centrality]) ['0 0.37', '1 0.60', '2 0.60', '3 0.37'] Notes ------ The eigenvector calculation is done by the power iteration method and has no guarantee of convergence. The iteration will stop after max_iter iterations or an error tolerance of number_of_nodes(G)*tol has been reached. For directed graphs this is "right" eigevector centrality. For "left" eigenvector centrality, first reverse the graph with G.reverse(). 
See Also -------- eigenvector_centrality_numpy pagerank hits """ from math import sqrt if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: raise nx.NetworkXException("Not defined for multigraphs.") if len(G)==0: raise nx.NetworkXException("Empty graph.") if nstart is None: # choose starting vector with entries of 1/len(G) x=dict([(n,1.0/len(G)) for n in G]) else: x=nstart # normalize starting vector s=1.0/sum(x.values()) for k in x: x[k]*=s nnodes=G.number_of_nodes() # make up to max_iter iterations for i in range(max_iter): xlast=x x=dict.fromkeys(xlast, 0) # do the multiplication y=Ax for n in x: for nbr in G[n]: x[n]+=xlast[nbr]*G[n][nbr].get('weight',1) # normalize vector try: s=1.0/sqrt(sum(v**2 for v in x.values())) # this should never be zero? except ZeroDivisionError: s=1.0 for n in x: x[n]*=s # check convergence err=sum([abs(x[n]-xlast[n]) for n in x]) if err < nnodes*tol: return x raise nx.NetworkXError("""eigenvector_centrality(): power iteration failed to converge in %d iterations."%(i+1))""") def eigenvector_centrality_numpy(G): """Compute the eigenvector centrality for the graph G. Parameters ---------- G : graph A networkx graph Returns ------- nodes : dictionary Dictionary of nodes with eigenvector centrality as the value. Examples -------- >>> G=nx.path_graph(4) >>> centrality=nx.eigenvector_centrality_numpy(G) >>> print(['%s %0.2f'%(node,centrality[node]) for node in centrality]) ['0 0.37', '1 0.60', '2 0.60', '3 0.37'] Notes ------ This algorithm uses the NumPy eigenvalue solver. For directed graphs this is "right" eigevector centrality. For "left" eigenvector centrality, first reverse the graph with G.reverse(). See Also -------- eigenvector_centrality pagerank hits """ try: import numpy as np except ImportError: raise ImportError('Requires NumPy: http://scipy.org/') if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: raise nx.NetworkXException('Not defined for multigraphs.') if len(G)==0: raise nx.NetworkXException('Empty graph.') A=nx.adj_matrix(G,nodelist=G.nodes()) eigenvalues,eigenvectors=np.linalg.eig(A) # eigenvalue indices in reverse sorted order ind=eigenvalues.argsort()[::-1] # eigenvector of largest eigenvalue at ind[0], normalized largest=np.array(eigenvectors[:,ind[0]]).flatten().real norm=np.sign(largest.sum())*np.linalg.norm(largest) centrality=dict(zip(G,map(float,largest/norm))) return centrality # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy import numpy.linalg except: raise SkipTest("numpy not available") networkx-1.8.1/networkx/algorithms/centrality/current_flow_closeness.py0000664000175000017500000001030512177456333026645 0ustar aricaric00000000000000""" Current-flow closeness centrality measures. """ # Copyright (C) 2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = """Aric Hagberg """ __all__ = ['current_flow_closeness_centrality','information_centrality'] import networkx as nx from networkx.algorithms.centrality.flow_matrix import * def current_flow_closeness_centrality(G, normalized=True, weight='weight', dtype=float, solver='lu'): """Compute current-flow closeness centrality for nodes. A variant of closeness centrality based on effective resistance between nodes in a network. This metric is also known as information centrality. Parameters ---------- G : graph A NetworkX graph normalized : bool, optional If True the values are normalized by 1/(n-1) where n is the number of nodes in G. 
dtype: data type (float) Default data type for internal matrices. Set to np.float32 for lower memory consumption. solver: string (default='lu') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). Returns ------- nodes : dictionary Dictionary of nodes with current flow closeness centrality as the value. See Also -------- closeness_centrality Notes ----- The algorithm is from Brandes [1]_. See also [2]_ for the original definition of information centrality. References ---------- .. [1] Ulrik Brandes and Daniel Fleischer, Centrality Measures Based on Current Flow. Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). LNCS 3404, pp. 533-544. Springer-Verlag, 2005. http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf .. [2] Stephenson, K. and Zelen, M. Rethinking centrality: Methods and examples. Social Networks. Volume 11, Issue 1, March 1989, pp. 1-37 http://dx.doi.org/10.1016/0378-8733(89)90016-6 """ from networkx.utils import reverse_cuthill_mckee_ordering try: import numpy as np except ImportError: raise ImportError('current_flow_closeness_centrality requires NumPy ', 'http://scipy.org/') try: import scipy except ImportError: raise ImportError('current_flow_closeness_centrality requires SciPy ', 'http://scipy.org/') if G.is_directed(): raise nx.NetworkXError('current_flow_closeness_centrality ', 'not defined for digraphs.') if G.is_directed(): raise nx.NetworkXError(\ "current_flow_closeness_centrality() not defined for digraphs.") if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") solvername={"full" :FullInverseLaplacian, "lu": SuperLUInverseLaplacian, "cg": CGInverseLaplacian} n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to H = nx.relabel_nodes(G,dict(zip(ordering,range(n)))) betweenness = dict.fromkeys(H,0.0) # b[v]=0 for v in H n = G.number_of_nodes() L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight, dtype=dtype, format='csc') C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver for v in H: col=C2.get_row(v) for w in H: betweenness[v]+=col[v]-2*col[w] betweenness[w]+=col[v] if normalized: nb=len(betweenness)-1.0 else: nb=1.0 for v in H: betweenness[v]=nb/(betweenness[v]) return dict((ordering[k],float(v)) for k,v in betweenness.items()) information_centrality=current_flow_closeness_centrality # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/algorithms/centrality/load.py0000664000175000017500000001417312177456333023004 0ustar aricaric00000000000000""" Load centrality. """ # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Sasha Gutfraind (ag362@cornell.edu)']) __all__ = ['load_centrality', 'edge_load'] import networkx as nx def newman_betweenness_centrality(G,v=None,cutoff=None, normalized=True, weight=None): """Compute load centrality for nodes. The load centrality of a node is the fraction of all shortest paths that pass through that node. 
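A minimal usage sketch (assuming ``import networkx as nx``; in the path
graph below only the interior node lies on any shortest path between
two other nodes)::

    G = nx.path_graph(3)
    c = nx.load_centrality(G)   # {0: 0.0, 1: 1.0, 2: 0.0}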
Parameters ---------- G : graph A networkx graph normalized : bool, optional If True the betweenness values are normalized by b=b/(n-1)(n-2) where n is the number of nodes in G. weight : None or string, optional If None, edge weights are ignored. Otherwise holds the name of the edge attribute used as weight. cutoff : bool, optional If specified, only consider paths of length <= cutoff. Returns ------- nodes : dictionary Dictionary of nodes with centrality as the value. See Also -------- betweenness_centrality() Notes ----- Load centrality is slightly different than betweenness. For this load algorithm see the reference Scientific collaboration networks: II. Shortest paths, weighted networks, and centrality, M. E. J. Newman, Phys. Rev. E 64, 016132 (2001). """ if v is not None: # only one node betweenness=0.0 for source in G: ubetween = _node_betweenness(G, source, cutoff, False, weight) betweenness += ubetween[v] if v in ubetween else 0 if normalized: order = G.order() if order <= 2: return betweenness # no normalization b=0 for all nodes betweenness *= 1.0 / ((order-1) * (order-2)) return betweenness else: betweenness = {}.fromkeys(G,0.0) for source in betweenness: ubetween = _node_betweenness(G, source, cutoff, False, weight) for vk in ubetween: betweenness[vk] += ubetween[vk] if normalized: order = G.order() if order <= 2: return betweenness # no normalization b=0 for all nodes scale = 1.0 / ((order-1) * (order-2)) for v in betweenness: betweenness[v] *= scale return betweenness # all nodes def _node_betweenness(G,source,cutoff=False,normalized=True,weight=None): """Node betweenness helper: see betweenness_centrality for what you probably want. This actually computes "load" and not betweenness. See https://networkx.lanl.gov/ticket/103 This calculates the load of each node for paths from a single source. (The fraction of number of shortests paths from source that go through each node.) To get the load for a node you need to do all-pairs shortest paths. If weight is not None then use Dijkstra for finding shortest paths. In this case a cutoff is not implemented and so is ignored. """ # get the predecessor and path length data if weight is None: (pred,length)=nx.predecessor(G,source,cutoff=cutoff,return_seen=True) else: (pred,length)=nx.dijkstra_predecessor_and_distance(G,source,weight=weight) # order the nodes by path length onodes = [ (l,vert) for (vert,l) in length.items() ] onodes.sort() onodes[:] = [vert for (l,vert) in onodes if l>0] # intialize betweenness between={}.fromkeys(length,1.0) while onodes: v=onodes.pop() if v in pred: num_paths=len(pred[v]) # Discount betweenness if more than for x in pred[v]: # one shortest path. if x==source: # stop if hit source because all remaining v break # also have pred[v]==[source] between[x]+=between[v]/float(num_paths) # remove source for v in between: between[v]-=1 # rescale to be between 0 and 1 if normalized: l=len(between) if l > 2: scale=1.0/float((l-1)*(l-2)) # 1/the number of possible paths for v in between: between[v] *= scale return between load_centrality=newman_betweenness_centrality def edge_load(G,nodes=None,cutoff=False): """Compute edge load. WARNING: This module is for demonstration and testing purposes. """ betweenness={} if not nodes: # find betweenness for every node in graph nodes=G.nodes() # that probably is what you want... 
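    # Accumulate per-source edge loads: _edge_betweenness returns a
    # dictionary keyed by (u, v) edge tuples for one source, and the loop
    # below sums those contributions over all requested source nodes.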
for source in nodes: ubetween=_edge_betweenness(G,source,nodes,cutoff=cutoff) for v in ubetween.keys(): b=betweenness.setdefault(v,0) # get or set default betweenness[v]=ubetween[v]+b # cumulative total return betweenness def _edge_betweenness(G,source,nodes,cutoff=False): """ Edge betweenness helper. """ between={} # get the predecessor data #(pred,length)=_fast_predecessor(G,source,cutoff=cutoff) (pred,length)=nx.predecessor(G,source,cutoff=cutoff,return_seen=True) # order the nodes by path length onodes = [ nn for dd,nn in sorted( (dist,n) for n,dist in length.items() )] # intialize betweenness, doesn't account for any edge weights for u,v in G.edges(nodes): between[(u,v)]=1.0 between[(v,u)]=1.0 while onodes: # work through all paths v=onodes.pop() if v in pred: num_paths=len(pred[v]) # Discount betweenness if more than for w in pred[v]: # one shortest path. if w in pred: num_paths=len(pred[w]) # Discount betweenness, mult path for x in pred[w]: between[(w,x)]+=between[(v,w)]/num_paths between[(x,w)]+=between[(w,v)]/num_paths return between networkx-1.8.1/networkx/algorithms/centrality/__init__.py0000664000175000017500000000215612177456333023622 0ustar aricaric00000000000000from networkx.algorithms.centrality.betweenness import * from networkx.algorithms.centrality.betweenness_subset import * from networkx.algorithms.centrality.closeness import * from networkx.algorithms.centrality.current_flow_closeness import * from networkx.algorithms.centrality.current_flow_betweenness import * from networkx.algorithms.centrality.current_flow_betweenness_subset import * from networkx.algorithms.centrality.degree_alg import * from networkx.algorithms.centrality.eigenvector import * from networkx.algorithms.centrality.katz import * from networkx.algorithms.centrality.load import * from networkx.algorithms.centrality.communicability_alg import * import networkx.algorithms.centrality.betweenness import networkx.algorithms.centrality.closeness import networkx.algorithms.centrality.current_flow_betweenness import networkx.algorithms.centrality.current_flow_closeness import networkx.algorithms.centrality.degree_alg import networkx.algorithms.centrality.eigenvector import networkx.algorithms.centrality.load import networkx.algorithms.centrality.communicability_alg import networkx.algorithms.centrality.katz networkx-1.8.1/networkx/algorithms/centrality/closeness.py0000664000175000017500000000665612177456333024072 0ustar aricaric00000000000000""" Closeness centrality measures. """ # Copyright (C) 2004-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import functools import networkx as nx __author__ = "\n".join(['Aric Hagberg ', 'Pieter Swart (swart@lanl.gov)', 'Sasha Gutfraind (ag362@cornell.edu)']) __all__ = ['closeness_centrality'] def closeness_centrality(G, u=None, distance=None, normalized=True): r"""Compute closeness centrality for nodes. Closeness centrality [1]_ of a node `u` is the reciprocal of the sum of the shortest path distances from `u` to all `n-1` other nodes. Since the sum of distances depends on the number of nodes in the graph, closeness is normalized by the sum of minimum possible distances `n-1`. .. math:: C(u) = \frac{n - 1}{\sum_{v=1}^{n} d(v, u)}, where `d(v, u)` is the shortest-path distance between `v` and `u`, and `n` is the number of nodes in the graph. Notice that higher values of closeness indicate higher centrality. 
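For example, in a path graph on three nodes the middle node is at
distance 1 from both of the other nodes, so its closeness is
2/(1+1) = 1.0, while each endpoint is at distances 1 and 2 and has
closeness 2/(1+2) = 2/3. A minimal sketch (assuming
``import networkx as nx``)::

    G = nx.path_graph(3)
    c = nx.closeness_centrality(G)   # {0: 0.666..., 1: 1.0, 2: 0.666...}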
Parameters ---------- G : graph A NetworkX graph u : node, optional Return only the value for node u distance : edge attribute key, optional (default=None) Use the specified edge attribute as the edge distance in shortest path calculations normalized : bool, optional If True (default) normalize by the number of nodes in the connected part of the graph. Returns ------- nodes : dictionary Dictionary of nodes with closeness centrality as the value. See Also -------- betweenness_centrality, load_centrality, eigenvector_centrality, degree_centrality Notes ----- The closeness centrality is normalized to `(n-1)/(|G|-1)` where `n` is the number of nodes in the connected part of graph containing the node. If the graph is not completely connected, this algorithm computes the closeness centrality for each connected part separately. If the 'distance' keyword is set to an edge attribute key then the shortest-path length will be computed using Dijkstra's algorithm with that edge attribute as the edge weight. References ---------- .. [1] Freeman, L.C., 1979. Centrality in networks: I. Conceptual clarification. Social Networks 1, 215--239. http://www.soc.ucsb.edu/faculty/friedkin/Syllabi/Soc146/Freeman78.PDF """ if distance is not None: # use Dijkstra's algorithm with specified attribute as edge weight path_length = functools.partial(nx.single_source_dijkstra_path_length, weight=distance) else: path_length = nx.single_source_shortest_path_length if u is None: nodes = G.nodes() else: nodes = [u] closeness_centrality = {} for n in nodes: sp = path_length(G,n) totsp = sum(sp.values()) if totsp > 0.0 and len(G) > 1: closeness_centrality[n] = (len(sp)-1.0) / totsp # normalize to number of nodes-1 in connected part if normalized: s = (len(sp)-1.0) / ( len(G) - 1 ) closeness_centrality[n] *= s else: closeness_centrality[n] = 0.0 if u is not None: return closeness_centrality[u] else: return closeness_centrality networkx-1.8.1/networkx/algorithms/centrality/tests/0000775000175000017500000000000012177457361022651 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py0000664000175000017500000002174112177456333032432 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx from networkx import betweenness_centrality_subset,\ edge_betweenness_centrality_subset class TestSubsetBetweennessCentrality: def test_K5(self): """Betweenness centrality: K5""" G=networkx.complete_graph(5) b=betweenness_centrality_subset(G, sources=[0], targets=[1,3], weight=None) b_answer={0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P5_directed(self): """Betweenness centrality: P5 directed""" G=networkx.DiGraph() G.add_path(list(range(5))) b_answer={0:0,1:1,2:1,3:0,4:0,5:0} b=betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P5(self): """Betweenness centrality: P5""" G=networkx.Graph() G.add_path(list(range(5))) b_answer={0:0,1:0.5,2:0.5,3:0,4:0,5:0} b=betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P5_multiple_target(self): """Betweenness centrality: P5 multiple target""" G=networkx.Graph() G.add_path(list(range(5))) b_answer={0:0,1:1,2:1,3:0.5,4:0,5:0} b=betweenness_centrality_subset(G, sources=[0], targets=[3,4], weight=None) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_box(self): 
"""Betweenness centrality: box""" G=networkx.Graph() G.add_edge(0,1) G.add_edge(0,2) G.add_edge(1,3) G.add_edge(2,3) b_answer={0:0,1:0.25,2:0.25,3:0} b=betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_box_and_path(self): """Betweenness centrality: box and path""" G=networkx.Graph() G.add_edge(0,1) G.add_edge(0,2) G.add_edge(1,3) G.add_edge(2,3) G.add_edge(3,4) G.add_edge(4,5) b_answer={0:0,1:0.5,2:0.5,3:0.5,4:0,5:0} b=betweenness_centrality_subset(G, sources=[0], targets=[3,4], weight=None) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_box_and_path2(self): """Betweenness centrality: box and path multiple target""" G=networkx.Graph() G.add_edge(0,1) G.add_edge(1,2) G.add_edge(2,3) G.add_edge(1,20) G.add_edge(20,3) G.add_edge(3,4) b_answer={0:0,1:1.0,2:0.5,20:0.5,3:0.5,4:0} b=betweenness_centrality_subset(G, sources=[0], targets=[3,4]) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) class TestBetweennessCentralitySources: def test_K5(self): """Betweenness centrality: K5""" G=networkx.complete_graph(5) b=networkx.betweenness_centrality_source(G, weight=None, normalized=False) b_answer={0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P3(self): """Betweenness centrality: P3""" G=networkx.path_graph(3) b_answer={0: 0.0, 1: 1.0, 2: 0.0} b=networkx.betweenness_centrality_source(G, weight=None, normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) class TestEdgeSubsetBetweennessCentrality: def test_K5(self): """Edge betweenness centrality: K5""" G=networkx.complete_graph(5) b=edge_betweenness_centrality_subset(G, sources=[0], targets=[1,3], weight=None) b_answer=dict.fromkeys(G.edges(),0) b_answer[(0,3)]=0.5 b_answer[(0,1)]=0.5 for n in sorted(G.edges()): print(n,b[n]) assert_almost_equal(b[n],b_answer[n]) def test_P5_directed(self): """Edge betweenness centrality: P5 directed""" G=networkx.DiGraph() G.add_path(list(range(5))) b_answer=dict.fromkeys(G.edges(),0) b_answer[(0,1)]=1 b_answer[(1,2)]=1 b_answer[(2,3)]=1 b=edge_betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_P5(self): """Edge betweenness centrality: P5""" G=networkx.Graph() G.add_path(list(range(5))) b_answer=dict.fromkeys(G.edges(),0) b_answer[(0,1)]=0.5 b_answer[(1,2)]=0.5 b_answer[(2,3)]=0.5 b=edge_betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_P5_multiple_target(self): """Edge betweenness centrality: P5 multiple target""" G=networkx.Graph() G.add_path(list(range(5))) b_answer=dict.fromkeys(G.edges(),0) b_answer[(0,1)]=1 b_answer[(1,2)]=1 b_answer[(2,3)]=1 b_answer[(3,4)]=0.5 b=edge_betweenness_centrality_subset(G, sources=[0], targets=[3,4], weight=None) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_box(self): """Edge etweenness centrality: box""" G=networkx.Graph() G.add_edge(0,1) G.add_edge(0,2) G.add_edge(1,3) G.add_edge(2,3) b_answer=dict.fromkeys(G.edges(),0) b_answer[(0,1)]=0.25 b_answer[(0,2)]=0.25 b_answer[(1,3)]=0.25 b_answer[(2,3)]=0.25 b=edge_betweenness_centrality_subset(G, sources=[0], targets=[3], weight=None) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_box_and_path(self): """Edge etweenness centrality: box and path""" G=networkx.Graph() G.add_edge(0,1) G.add_edge(0,2) 
G.add_edge(1,3) G.add_edge(2,3) G.add_edge(3,4) G.add_edge(4,5) b_answer=dict.fromkeys(G.edges(),0) b_answer[(0,1)]=1.0/2 b_answer[(0,2)]=1.0/2 b_answer[(1,3)]=1.0/2 b_answer[(2,3)]=1.0/2 b_answer[(3,4)]=1.0/2 b=edge_betweenness_centrality_subset(G, sources=[0], targets=[3,4], weight=None) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_box_and_path2(self): """Edge betweenness centrality: box and path multiple target""" G=networkx.Graph() G.add_edge(0,1) G.add_edge(1,2) G.add_edge(2,3) G.add_edge(1,20) G.add_edge(20,3) G.add_edge(3,4) b_answer=dict.fromkeys(G.edges(),0) b_answer[(0,1)]=1.0 b_answer[(1,20)]=1.0/2 b_answer[(3,20)]=1.0/2 b_answer[(1,2)]=1.0/2 b_answer[(2,3)]=1.0/2 b_answer[(3,4)]=1.0/2 b=edge_betweenness_centrality_subset(G, sources=[0], targets=[3,4], weight=None) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) networkx-1.8.1/networkx/algorithms/centrality/tests/test_load_centrality.py0000664000175000017500000002033012177456333027433 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestLoadCentrality: def setUp(self): G=nx.Graph(); G.add_edge(0,1,weight=3) G.add_edge(0,2,weight=2) G.add_edge(0,3,weight=6) G.add_edge(0,4,weight=4) G.add_edge(1,3,weight=5) G.add_edge(1,5,weight=5) G.add_edge(2,4,weight=1) G.add_edge(3,4,weight=2) G.add_edge(3,5,weight=1) G.add_edge(4,5,weight=4) self.G=G self.exact_weighted={0: 4.0, 1: 0.0, 2: 8.0, 3: 6.0, 4: 8.0, 5: 0.0} self.K = nx.krackhardt_kite_graph() self.P3 = nx.path_graph(3) self.P4 = nx.path_graph(4) self.K5 = nx.complete_graph(5) self.C4=nx.cycle_graph(4) self.T=nx.balanced_tree(r=2, h=2) self.Gb = nx.Graph() self.Gb.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (2, 4), (4, 5), (3, 5)]) self.F = nx.florentine_families_graph() self.D = nx.cycle_graph(3, create_using=nx.DiGraph()) self.D.add_edges_from([(3, 0), (4, 3)]) def test_not_strongly_connected(self): b = nx.load_centrality(self.D) result = {0: 5./12, 1: 1./4, 2: 1./12, 3: 1./4, 4: 0.000} for n in sorted(self.D): assert_almost_equal(result[n], b[n], places=3) assert_almost_equal(result[n], nx.load_centrality(self.D, n), places=3) def test_weighted_load(self): b=nx.load_centrality(self.G,weight='weight',normalized=False) for n in sorted(self.G): assert_equal(b[n],self.exact_weighted[n]) def test_k5_load(self): G=self.K5 c=nx.load_centrality(G) d={0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) def test_p3_load(self): G=self.P3 c=nx.load_centrality(G) d={0: 0.000, 1: 1.000, 2: 0.000} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) c=nx.load_centrality(G,v=1) assert_almost_equal(c,1.0) c=nx.load_centrality(G,v=1,normalized=True) assert_almost_equal(c,1.0) def test_p2_load(self): G=nx.path_graph(2) c=nx.load_centrality(G) d={0: 0.000, 1: 0.000} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) def test_krackhardt_load(self): G=self.K c=nx.load_centrality(G) d={0: 0.023, 1: 0.023, 2: 0.000, 3: 0.102, 4: 0.000, 5: 0.231, 6: 0.231, 7: 0.389, 8: 0.222, 9: 0.000} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) def test_florentine_families_load(self): G=self.F c=nx.load_centrality(G) d={'Acciaiuoli': 0.000, 'Albizzi': 0.211, 'Barbadori': 0.093, 'Bischeri': 0.104, 'Castellani': 0.055, 'Ginori': 0.000, 'Guadagni': 0.251, 'Lamberteschi': 0.000, 'Medici': 0.522, 'Pazzi': 0.000, 'Peruzzi': 0.022, 'Ridolfi': 0.117, 'Salviati': 0.143, 'Strozzi': 0.106, 'Tornabuoni': 0.090} for n in sorted(G): 
assert_almost_equal(c[n],d[n],places=3) def test_unnormalized_k5_load(self): G=self.K5 c=nx.load_centrality(G,normalized=False) d={0: 0.000, 1: 0.000, 2: 0.000, 3: 0.000, 4: 0.000} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) def test_unnormalized_p3_load(self): G=self.P3 c=nx.load_centrality(G,normalized=False) d={0: 0.000, 1: 2.000, 2: 0.000} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) def test_unnormalized_krackhardt_load(self): G=self.K c=nx.load_centrality(G,normalized=False) d={0: 1.667, 1: 1.667, 2: 0.000, 3: 7.333, 4: 0.000, 5: 16.667, 6: 16.667, 7: 28.000, 8: 16.000, 9: 0.000} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) def test_unnormalized_florentine_families_load(self): G=self.F c=nx.load_centrality(G,normalized=False) d={'Acciaiuoli': 0.000, 'Albizzi': 38.333, 'Barbadori': 17.000, 'Bischeri': 19.000, 'Castellani': 10.000, 'Ginori': 0.000, 'Guadagni': 45.667, 'Lamberteschi': 0.000, 'Medici': 95.000, 'Pazzi': 0.000, 'Peruzzi': 4.000, 'Ridolfi': 21.333, 'Salviati': 26.000, 'Strozzi': 19.333, 'Tornabuoni': 16.333} for n in sorted(G): assert_almost_equal(c[n],d[n],places=3) def test_load_betweenness_difference(self): # Difference Between Load and Betweenness # --------------------------------------- The smallest graph # that shows the difference between load and betweenness is # G=ladder_graph(3) (Graph B below) # Graph A and B are from Tao Zhou, Jian-Guo Liu, Bing-Hong # Wang: Comment on ``Scientific collaboration # networks. II. Shortest paths, weighted networks, and # centrality". http://arxiv.org/pdf/physics/0511084 # Notice that unlike here, their calculation adds to 1 to the # betweennes of every node i for every path from i to every # other node. This is exactly what it should be, based on # Eqn. (1) in their paper: the eqn is B(v) = \sum_{s\neq t, # s\neq v}{\frac{\sigma_{st}(v)}{\sigma_{st}}}, therefore, # they allow v to be the target node. # We follow Brandes 2001, who follows Freeman 1977 that make # the sum for betweenness of v exclude paths where v is either # the source or target node. To agree with their numbers, we # must additionally, remove edge (4,8) from the graph, see AC # example following (there is a mistake in the figure in their # paper - personal communication). 
# A = nx.Graph() # A.add_edges_from([(0,1), (1,2), (1,3), (2,4), # (3,5), (4,6), (4,7), (4,8), # (5,8), (6,9), (7,9), (8,9)]) B = nx.Graph() # ladder_graph(3) B.add_edges_from([(0,1), (0,2), (1,3), (2,3), (2,4), (4,5), (3,5)]) c = nx.load_centrality(B,normalized=False) d={0: 1.750, 1: 1.750, 2: 6.500, 3: 6.500, 4: 1.750, 5: 1.750} for n in sorted(B): assert_almost_equal(c[n],d[n],places=3) def test_c4_edge_load(self): G=self.C4 c = nx.edge_load(G) d={(0, 1): 6.000, (0, 3): 6.000, (1, 2): 6.000, (2, 3): 6.000} for n in G.edges(): assert_almost_equal(c[n],d[n],places=3) def test_p4_edge_load(self): G=self.P4 c = nx.edge_load(G) d={(0, 1): 6.000, (1, 2): 8.000, (2, 3): 6.000} for n in G.edges(): assert_almost_equal(c[n],d[n],places=3) def test_k5_edge_load(self): G=self.K5 c = nx.edge_load(G) d={(0, 1): 5.000, (0, 2): 5.000, (0, 3): 5.000, (0, 4): 5.000, (1, 2): 5.000, (1, 3): 5.000, (1, 4): 5.000, (2, 3): 5.000, (2, 4): 5.000, (3, 4): 5.000} for n in G.edges(): assert_almost_equal(c[n],d[n],places=3) def test_tree_edge_load(self): G=self.T c = nx.edge_load(G) d={(0, 1): 24.000, (0, 2): 24.000, (1, 3): 12.000, (1, 4): 12.000, (2, 5): 12.000, (2, 6): 12.000} for n in G.edges(): assert_almost_equal(c[n],d[n],places=3) networkx-1.8.1/networkx/algorithms/centrality/tests/test_closeness_centrality.py0000664000175000017500000000527212177456333030522 0ustar aricaric00000000000000""" Tests for degree centrality. """ from nose.tools import * import networkx as nx class TestClosenessCentrality: def setUp(self): self.K = nx.krackhardt_kite_graph() self.P3 = nx.path_graph(3) self.P4 = nx.path_graph(4) self.K5 = nx.complete_graph(5) self.C4=nx.cycle_graph(4) self.T=nx.balanced_tree(r=2, h=2) self.Gb = nx.Graph() self.Gb.add_edges_from([(0,1), (0,2), (1,3), (2,3), (2,4), (4,5), (3,5)]) F = nx.florentine_families_graph() self.F = F def test_k5_closeness(self): c=nx.closeness_centrality(self.K5) d={0: 1.000, 1: 1.000, 2: 1.000, 3: 1.000, 4: 1.000} for n in sorted(self.K5): assert_almost_equal(c[n],d[n],places=3) def test_p3_closeness(self): c=nx.closeness_centrality(self.P3) d={0: 0.667, 1: 1.000, 2: 0.667} for n in sorted(self.P3): assert_almost_equal(c[n],d[n],places=3) def test_krackhardt_closeness(self): c=nx.closeness_centrality(self.K) d={0: 0.529, 1: 0.529, 2: 0.500, 3: 0.600, 4: 0.500, 5: 0.643, 6: 0.643, 7: 0.600, 8: 0.429, 9: 0.310} for n in sorted(self.K): assert_almost_equal(c[n],d[n],places=3) def test_florentine_families_closeness(self): c=nx.closeness_centrality(self.F) d={'Acciaiuoli': 0.368, 'Albizzi': 0.483, 'Barbadori': 0.4375, 'Bischeri': 0.400, 'Castellani': 0.389, 'Ginori': 0.333, 'Guadagni': 0.467, 'Lamberteschi': 0.326, 'Medici': 0.560, 'Pazzi': 0.286, 'Peruzzi': 0.368, 'Ridolfi': 0.500, 'Salviati': 0.389, 'Strozzi': 0.4375, 'Tornabuoni': 0.483} for n in sorted(self.F): assert_almost_equal(c[n],d[n],places=3) def test_weighted_closeness(self): XG=nx.Graph() XG.add_weighted_edges_from([('s','u',10), ('s','x',5), ('u','v',1), ('u','x',2), ('v','y',1), ('x','u',3), ('x','v',5), ('x','y',2), ('y','s',7), ('y','v',6)]) c=nx.closeness_centrality(XG,distance='weight') d={'y': 0.200, 'x': 0.286, 's': 0.138, 'u': 0.235, 'v': 0.200} for n in sorted(XG): assert_almost_equal(c[n],d[n],places=3) networkx-1.8.1/networkx/algorithms/centrality/tests/test_degree_centrality.py0000664000175000017500000000574612177456333027765 0ustar aricaric00000000000000""" Unit tests for degree centrality. 
""" from nose.tools import * import networkx as nx class TestDegreeCentrality: def __init__(self): self.K = nx.krackhardt_kite_graph() self.P3 = nx.path_graph(3) self.K5 = nx.complete_graph(5) F = nx.Graph() # Florentine families F.add_edge('Acciaiuoli','Medici') F.add_edge('Castellani','Peruzzi') F.add_edge('Castellani','Strozzi') F.add_edge('Castellani','Barbadori') F.add_edge('Medici','Barbadori') F.add_edge('Medici','Ridolfi') F.add_edge('Medici','Tornabuoni') F.add_edge('Medici','Albizzi') F.add_edge('Medici','Salviati') F.add_edge('Salviati','Pazzi') F.add_edge('Peruzzi','Strozzi') F.add_edge('Peruzzi','Bischeri') F.add_edge('Strozzi','Ridolfi') F.add_edge('Strozzi','Bischeri') F.add_edge('Ridolfi','Tornabuoni') F.add_edge('Tornabuoni','Guadagni') F.add_edge('Albizzi','Ginori') F.add_edge('Albizzi','Guadagni') F.add_edge('Bischeri','Guadagni') F.add_edge('Guadagni','Lamberteschi') self.F = F G = nx.DiGraph() G.add_edge(0,5) G.add_edge(1,5) G.add_edge(2,5) G.add_edge(3,5) G.add_edge(4,5) G.add_edge(5,6) G.add_edge(5,7) G.add_edge(5,8) self.G = G def test_degree_centrality_1(self): d = nx.degree_centrality(self.K5) exact = dict(zip(range(5), [1]*5)) for n,dc in d.items(): assert_almost_equal(exact[n], dc) def test_degree_centrality_2(self): d = nx.degree_centrality(self.P3) exact = {0:0.5, 1:1, 2:0.5} for n,dc in d.items(): assert_almost_equal(exact[n], dc) def test_degree_centrality_3(self): d = nx.degree_centrality(self.K) exact = {0:.444, 1:.444, 2:.333, 3:.667, 4:.333, 5:.556, 6:.556, 7:.333, 8:.222, 9:.111} for n,dc in d.items(): assert_almost_equal(exact[n], float("%5.3f" % dc)) def test_degree_centrality_4(self): d = nx.degree_centrality(self.F) names = sorted(self.F.nodes()) dcs = [0.071, 0.214, 0.143, 0.214, 0.214, 0.071, 0.286, 0.071, 0.429, 0.071, 0.214, 0.214, 0.143, 0.286, 0.214] exact = dict(zip(names, dcs)) for n,dc in d.items(): assert_almost_equal(exact[n], float("%5.3f" % dc)) def test_indegree_centrality(self): d = nx.in_degree_centrality(self.G) exact = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.625, 6: 0.125, 7: 0.125, 8: 0.125} for n,dc in d.items(): assert_almost_equal(exact[n], dc) def test_outdegree_centrality(self): d = nx.out_degree_centrality(self.G) exact = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.375, 6: 0.0, 7: 0.0, 8: 0.0} for n,dc in d.items(): assert_almost_equal(exact[n], dc) ././@LongLink0000000000000000000000000000014700000000000011217 Lustar 00000000000000networkx-1.8.1/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.pynetworkx-1.8.1/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.0000664000175000017500000001703412177456333034652 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx from nose.plugins.attrib import attr from networkx import edge_current_flow_betweenness_centrality \ as edge_current_flow from networkx import edge_current_flow_betweenness_centrality_subset \ as edge_current_flow_subset class TestFlowBetweennessCentrality(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np import scipy except ImportError: raise SkipTest('NumPy not available.') def test_K4_normalized(self): """Betweenness centrality: K4""" G=networkx.complete_graph(4) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True) 
b_answer=networkx.current_flow_betweenness_centrality(G,normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_K4(self): """Betweenness centrality: K4""" G=networkx.complete_graph(4) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True) b_answer=networkx.current_flow_betweenness_centrality(G,normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) # test weighted network G.add_edge(0,1,{'weight':0.5,'other':0.3}) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True, weight=None) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True) b_answer=networkx.current_flow_betweenness_centrality(G,normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True, weight='other') b_answer=networkx.current_flow_betweenness_centrality(G,normalized=True,weight='other') for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P4_normalized(self): """Betweenness centrality: P4 normalized""" G=networkx.path_graph(4) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True) b_answer=networkx.current_flow_betweenness_centrality(G,normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P4(self): """Betweenness centrality: P4""" G=networkx.path_graph(4) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True) b_answer=networkx.current_flow_betweenness_centrality(G,normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_star(self): """Betweenness centrality: star """ G=networkx.Graph() G.add_star(['a','b','c','d']) b=networkx.current_flow_betweenness_centrality_subset(G, G.nodes(), G.nodes(), normalized=True) b_answer=networkx.current_flow_betweenness_centrality(G,normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) # class TestWeightedFlowBetweennessCentrality(): # pass class TestEdgeFlowBetweennessCentrality(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np import scipy except ImportError: raise SkipTest('NumPy not available.') def test_K4_normalized(self): """Betweenness centrality: K4""" G=networkx.complete_graph(4) b=edge_current_flow_subset(G,G.nodes(),G.nodes(),normalized=True) b_answer=edge_current_flow(G,normalized=True) for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) def test_K4(self): """Betweenness centrality: K4""" G=networkx.complete_graph(4) b=edge_current_flow_subset(G,G.nodes(),G.nodes(),normalized=False) b_answer=edge_current_flow(G,normalized=False) for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) # test weighted network G.add_edge(0,1,{'weight':0.5,'other':0.3}) b=edge_current_flow_subset(G,G.nodes(),G.nodes(),normalized=False,weight=None) # weight is None => same as unweighted network for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) b=edge_current_flow_subset(G,G.nodes(),G.nodes(),normalized=False) b_answer=edge_current_flow(G,normalized=False) for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) 
b=edge_current_flow_subset(G,G.nodes(),G.nodes(),normalized=False,weight='other') b_answer=edge_current_flow(G,normalized=False,weight='other') for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) def test_C4(self): """Edge betweenness centrality: C4""" G=networkx.cycle_graph(4) b=edge_current_flow_subset(G,G.nodes(),G.nodes(),normalized=True) b_answer=edge_current_flow(G,normalized=True) for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) def test_P4(self): """Edge betweenness centrality: P4""" G=networkx.path_graph(4) b=edge_current_flow_subset(G,G.nodes(),G.nodes(),normalized=True) b_answer=edge_current_flow(G,normalized=True) for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) networkx-1.8.1/networkx/algorithms/centrality/tests/test_katz_centrality.py0000664000175000017500000002405112177456333027471 0ustar aricaric00000000000000# -*- coding: utf-8 -*- import math from nose import SkipTest from nose.tools import * import networkx class TestKatzCentrality(object): def test_K5(self): """Katz centrality: K5""" G = networkx.complete_graph(5) alpha = 0.1 b = networkx.katz_centrality(G, alpha) v = math.sqrt(1 / 5.0) b_answer = dict.fromkeys(G, v) for n in sorted(G): assert_almost_equal(b[n], b_answer[n]) nstart = dict([(n, 1) for n in G]) b = networkx.katz_centrality(G, alpha, nstart=nstart) for n in sorted(G): assert_almost_equal(b[n], b_answer[n]) def test_P3(self): """Katz centrality: P3""" alpha = 0.1 G = networkx.path_graph(3) b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = networkx.katz_centrality(G, alpha) for n in sorted(G): assert_almost_equal(b[n], b_answer[n], places=4) @raises(networkx.NetworkXError) def test_maxiter(self): alpha = 0.1 G = networkx.path_graph(3) b = networkx.katz_centrality(G, alpha, max_iter=0) def test_beta_as_scalar(self): alpha = 0.1 beta = 0.1 b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = networkx.path_graph(3) b = networkx.katz_centrality(G, alpha, beta) for n in sorted(G): assert_almost_equal(b[n], b_answer[n], places=4) def test_beta_as_dict(self): alpha = 0.1 beta = {0: 1.0, 1: 1.0, 2: 1.0} b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = networkx.path_graph(3) b = networkx.katz_centrality(G, alpha, beta) for n in sorted(G): assert_almost_equal(b[n], b_answer[n], places=4) def test_multiple_alpha(self): alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] for alpha in alpha_list: b_answer = {0.1: {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}, 0.2: {0: 0.5454545454545454, 1: 0.6363636363636365, 2: 0.5454545454545454}, 0.3: {0: 0.5333964609104419, 1: 0.6564879518897746, 2: 0.5333964609104419}, 0.4: {0: 0.5232045649263551, 1: 0.6726915834767423, 2: 0.5232045649263551}, 0.5: {0: 0.5144957746691622, 1: 0.6859943117075809, 2: 0.5144957746691622}, 0.6: {0: 0.5069794004195823, 1: 0.6970966755769258, 2: 0.5069794004195823}} G = networkx.path_graph(3) b = networkx.katz_centrality(G, alpha) for n in sorted(G): assert_almost_equal(b[n], b_answer[alpha][n], places=4) @raises(networkx.NetworkXException) def test_multigraph(self): e = networkx.katz_centrality(networkx.MultiGraph(), 0.1) def test_empty(self): e = networkx.katz_centrality(networkx.Graph(), 0.1) assert_equal(e, {}) @raises(networkx.NetworkXException) def test_bad_beta(self): G = networkx.Graph([(0,1)]) beta = {0:77} e = networkx.katz_centrality(G, 0.1,beta=beta) 
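# ------------------------------------------------------------------
# Editor's sketch (hedged; not part of the original test file): the
# fixed alpha=0.1 used throughout these tests is safe because Katz
# centrality only converges for alpha < 1/lambda_max, where lambda_max
# is the largest eigenvalue of the adjacency matrix.  A quick check of
# that bound for P3, mirroring the eigenvalue computation used by
# TestKatzEigenvectorVKatz further below (assumes NumPy is available):
import networkx
from numpy.linalg import eigvals

G = networkx.path_graph(3)
lambda_max = float(max(eigvals(networkx.adjacency_matrix(G))))
assert 0.1 < 1.0 / lambda_max   # lambda_max = sqrt(2), so the bound is ~0.707
print(networkx.katz_centrality(G, alpha=0.1))
# ------------------------------------------------------------------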
@raises(networkx.NetworkXException) def test_bad_beta_numbe(self): G = networkx.Graph([(0,1)]) e = networkx.katz_centrality(G, 0.1,beta='foo') class TestKatzCentralityNumpy(object): numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np except ImportError: raise SkipTest('NumPy not available.') def test_K5(self): """Katz centrality: K5""" G = networkx.complete_graph(5) alpha = 0.1 b = networkx.katz_centrality(G, alpha) v = math.sqrt(1 / 5.0) b_answer = dict.fromkeys(G, v) for n in sorted(G): assert_almost_equal(b[n], b_answer[n]) nstart = dict([(n, 1) for n in G]) b = networkx.eigenvector_centrality_numpy(G) for n in sorted(G): assert_almost_equal(b[n], b_answer[n], places=3) def test_P3(self): """Katz centrality: P3""" alpha = 0.1 G = networkx.path_graph(3) b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} b = networkx.katz_centrality_numpy(G, alpha) for n in sorted(G): assert_almost_equal(b[n], b_answer[n], places=4) def test_beta_as_scalar(self): alpha = 0.1 beta = 0.1 b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = networkx.path_graph(3) b = networkx.katz_centrality_numpy(G, alpha, beta) for n in sorted(G): assert_almost_equal(b[n], b_answer[n], places=4) def test_beta_as_dict(self): alpha = 0.1 beta = {0: 1.0, 1: 1.0, 2: 1.0} b_answer = {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162} G = networkx.path_graph(3) b = networkx.katz_centrality_numpy(G, alpha, beta) for n in sorted(G): assert_almost_equal(b[n], b_answer[n], places=4) def test_multiple_alpha(self): alpha_list = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] for alpha in alpha_list: b_answer = {0.1: {0: 0.5598852584152165, 1: 0.6107839182711449, 2: 0.5598852584152162}, 0.2: {0: 0.5454545454545454, 1: 0.6363636363636365, 2: 0.5454545454545454}, 0.3: {0: 0.5333964609104419, 1: 0.6564879518897746, 2: 0.5333964609104419}, 0.4: {0: 0.5232045649263551, 1: 0.6726915834767423, 2: 0.5232045649263551}, 0.5: {0: 0.5144957746691622, 1: 0.6859943117075809, 2: 0.5144957746691622}, 0.6: {0: 0.5069794004195823, 1: 0.6970966755769258, 2: 0.5069794004195823}} G = networkx.path_graph(3) b = networkx.katz_centrality_numpy(G, alpha) for n in sorted(G): assert_almost_equal(b[n], b_answer[alpha][n], places=4) @raises(networkx.NetworkXException) def test_multigraph(self): e = networkx.katz_centrality(networkx.MultiGraph(), 0.1) def test_empty(self): e = networkx.katz_centrality(networkx.Graph(), 0.1) assert_equal(e, {}) @raises(networkx.NetworkXException) def test_bad_beta(self): G = networkx.Graph([(0,1)]) beta = {0:77} e = networkx.katz_centrality_numpy(G, 0.1,beta=beta) @raises(networkx.NetworkXException) def test_bad_beta_numbe(self): G = networkx.Graph([(0,1)]) e = networkx.katz_centrality_numpy(G, 0.1,beta='foo') class TestKatzCentralityDirected(object): def setUp(self): G = networkx.DiGraph() edges = [(1, 2),(1, 3),(2, 4),(3, 2),(3, 5),(4, 2),(4, 5),(4, 6),(5, 6), (5, 7),(5, 8),(6, 8),(7, 1),(7, 5),(7, 8),(8, 6),(8, 7)] G.add_edges_from(edges, weight=2.0) self.G = G self.G.alpha = 0.1 self.G.evc = [ 0.3289589783189635, 0.2832077296243516, 0.3425906003685471, 0.3970420865198392, 0.41074871061646284, 0.272257430756461, 0.4201989685435462, 0.34229059218038554, ] H = networkx.DiGraph(edges) self.H = G self.H.alpha = 0.1 self.H.evc = [ 0.3289589783189635, 0.2832077296243516, 0.3425906003685471, 0.3970420865198392, 0.41074871061646284, 0.272257430756461, 0.4201989685435462, 
0.34229059218038554, ] def test_eigenvector_centrality_weighted(self): G = self.G alpha = self.G.alpha p = networkx.katz_centrality(G, alpha) for (a, b) in zip(list(p.values()), self.G.evc): assert_almost_equal(a, b) def test_eigenvector_centrality_unweighted(self): G = self.H alpha = self.H.alpha p = networkx.katz_centrality(G, alpha) for (a, b) in zip(list(p.values()), self.G.evc): assert_almost_equal(a, b) class TestKatzCentralityDirectedNumpy(TestKatzCentralityDirected): numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np except ImportError: raise SkipTest('NumPy not available.') def test_eigenvector_centrality_weighted(self): G = self.G alpha = self.G.alpha p = networkx.katz_centrality_numpy(G, alpha) for (a, b) in zip(list(p.values()), self.G.evc): assert_almost_equal(a, b) def test_eigenvector_centrality_unweighted(self): G = self.H alpha = self.H.alpha p = networkx.katz_centrality_numpy(G, alpha) for (a, b) in zip(list(p.values()), self.G.evc): assert_almost_equal(a, b) class TestKatzEigenvectorVKatz(object): numpy = 1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np global eigvals try: import numpy as np from numpy.linalg import eigvals except ImportError: raise SkipTest('NumPy not available.') def test_eigenvector_v_katz_random(self): G = networkx.gnp_random_graph(10,0.5) l = float(max(eigvals(networkx.adjacency_matrix(G)))) e = networkx.eigenvector_centrality_numpy(G) k = networkx.katz_centrality_numpy(G, 1.0/l) for n in G: assert_almost_equal(e[n], k[n]) networkx-1.8.1/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py0000664000175000017500000001705112177456333033635 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx from nose.plugins.attrib import attr from networkx import edge_current_flow_betweenness_centrality \ as edge_current_flow from networkx import approximate_current_flow_betweenness_centrality \ as approximate_cfbc class TestFlowBetweennessCentrality(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np import scipy except ImportError: raise SkipTest('NumPy not available.') def test_K4_normalized(self): """Betweenness centrality: K4""" G=networkx.complete_graph(4) b=networkx.current_flow_betweenness_centrality(G,normalized=True) b_answer={0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) G.add_edge(0,1,{'weight':0.5,'other':0.3}) b=networkx.current_flow_betweenness_centrality(G,normalized=True,weight=None) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) wb_answer={0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555} b=networkx.current_flow_betweenness_centrality(G,normalized=True) for n in sorted(G): assert_almost_equal(b[n],wb_answer[n]) wb_answer={0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358} b=networkx.current_flow_betweenness_centrality(G,normalized=True,weight='other') for n in sorted(G): assert_almost_equal(b[n],wb_answer[n]) def test_K4(self): """Betweenness centrality: K4""" G=networkx.complete_graph(4) for solver in ['full','lu','cg']: b=networkx.current_flow_betweenness_centrality(G, normalized=False, solver=solver) b_answer={0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P4_normalized(self): 
"""Betweenness centrality: P4 normalized""" G=networkx.path_graph(4) b=networkx.current_flow_betweenness_centrality(G,normalized=True) b_answer={0: 0, 1: 2./3, 2: 2./3, 3:0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P4(self): """Betweenness centrality: P4""" G=networkx.path_graph(4) b=networkx.current_flow_betweenness_centrality(G,normalized=False) b_answer={0: 0, 1: 2, 2: 2, 3: 0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_star(self): """Betweenness centrality: star """ G=networkx.Graph() G.add_star(['a','b','c','d']) b=networkx.current_flow_betweenness_centrality(G,normalized=True) b_answer={'a': 1.0, 'b': 0.0, 'c': 0.0, 'd':0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_solers(self): """Betweenness centrality: alternate solvers""" G=networkx.complete_graph(4) for solver in ['full','lu','cg']: b=networkx.current_flow_betweenness_centrality(G,normalized=False, solver=solver) b_answer={0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) class TestApproximateFlowBetweennessCentrality(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np global assert_allclose try: import numpy as np import scipy from numpy.testing import assert_allclose except ImportError: raise SkipTest('NumPy not available.') def test_K4_normalized(self): "Approximate current-flow betweenness centrality: K4 normalized" G=networkx.complete_graph(4) b=networkx.current_flow_betweenness_centrality(G,normalized=True) epsilon=0.1 ba = approximate_cfbc(G,normalized=True, epsilon=0.5*epsilon) for n in sorted(G): assert_allclose(b[n],ba[n],atol=epsilon) def test_K4(self): "Approximate current-flow betweenness centrality: K4" G=networkx.complete_graph(4) b=networkx.current_flow_betweenness_centrality(G,normalized=False) epsilon=0.1 ba = approximate_cfbc(G,normalized=False, epsilon=0.5*epsilon) for n in sorted(G): assert_allclose(b[n],ba[n],atol=epsilon*len(G)**2) def test_star(self): "Approximate current-flow betweenness centrality: star" G=networkx.Graph() G.add_star(['a','b','c','d']) b=networkx.current_flow_betweenness_centrality(G,normalized=True) epsilon=0.1 ba = approximate_cfbc(G,normalized=True, epsilon=0.5*epsilon) for n in sorted(G): assert_allclose(b[n],ba[n],atol=epsilon) def test_grid(self): "Approximate current-flow betweenness centrality: 2d grid" G=networkx.grid_2d_graph(4,4) b=networkx.current_flow_betweenness_centrality(G,normalized=True) epsilon=0.1 ba = approximate_cfbc(G,normalized=True, epsilon=0.5*epsilon) for n in sorted(G): assert_allclose(b[n],ba[n],atol=epsilon) def test_solvers(self): "Approximate current-flow betweenness centrality: solvers" G=networkx.complete_graph(4) epsilon=0.1 for solver in ['full','lu','cg']: b=approximate_cfbc(G,normalized=False,solver=solver, epsilon=0.5*epsilon) b_answer={0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75} for n in sorted(G): assert_allclose(b[n],b_answer[n],atol=epsilon) class TestWeightedFlowBetweennessCentrality(object): pass class TestEdgeFlowBetweennessCentrality(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np import scipy except ImportError: raise SkipTest('NumPy not available.') def test_K4(self): """Edge flow betweenness centrality: K4""" G=networkx.complete_graph(4) b=edge_current_flow(G,normalized=True) b_answer=dict.fromkeys(G.edges(),0.25) for (s,t),v1 in b_answer.items(): 
v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) def test_K4_normalized(self): """Edge flow betweenness centrality: K4""" G=networkx.complete_graph(4) b=edge_current_flow(G,normalized=False) b_answer=dict.fromkeys(G.edges(),0.75) for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) def test_C4(self): """Edge flow betweenness centrality: C4""" G=networkx.cycle_graph(4) b=edge_current_flow(G,normalized=False) b_answer={(0, 1):1.25,(0, 3):1.25, (1, 2):1.25, (2, 3): 1.25} for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) def test_P4(self): """Edge betweenness centrality: P4""" G=networkx.path_graph(4) b=edge_current_flow(G,normalized=False) b_answer={(0, 1):1.5,(1, 2):2.0, (2, 3):1.5} for (s,t),v1 in b_answer.items(): v2=b.get((s,t),b.get((t,s))) assert_almost_equal(v1,v2) networkx-1.8.1/networkx/algorithms/centrality/tests/test_betweenness_centrality.py0000664000175000017500000004123512177456333031045 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx def weighted_G(): G=nx.Graph(); G.add_edge(0,1,weight=3) G.add_edge(0,2,weight=2) G.add_edge(0,3,weight=6) G.add_edge(0,4,weight=4) G.add_edge(1,3,weight=5) G.add_edge(1,5,weight=5) G.add_edge(2,4,weight=1) G.add_edge(3,4,weight=2) G.add_edge(3,5,weight=1) G.add_edge(4,5,weight=4) return G class TestBetweennessCentrality(object): def test_K5(self): """Betweenness centrality: K5""" G=nx.complete_graph(5) b=nx.betweenness_centrality(G, weight=None, normalized=False) b_answer={0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_K5_endpoints(self): """Betweenness centrality: K5 endpoints""" G=nx.complete_graph(5) b=nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) b_answer={0: 4.0, 1: 4.0, 2: 4.0, 3: 4.0, 4: 4.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P3_normalized(self): """Betweenness centrality: P3 normalized""" G=nx.path_graph(3) b=nx.betweenness_centrality(G, weight=None, normalized=True) b_answer={0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P3(self): """Betweenness centrality: P3""" G=nx.path_graph(3) b_answer={0: 0.0, 1: 1.0, 2: 0.0} b=nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P3_endpoints(self): """Betweenness centrality: P3 endpoints""" G=nx.path_graph(3) b_answer={0: 2.0, 1: 3.0, 2: 2.0} b=nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_krackhardt_kite_graph(self): """Betweenness centrality: Krackhardt kite graph""" G=nx.krackhardt_kite_graph() b_answer={0: 1.667,1: 1.667,2: 0.000,3: 7.333,4: 0.000, 5: 16.667,6: 16.667,7: 28.000,8: 16.000,9: 0.000} for b in b_answer: b_answer[b]/=2.0 b=nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_krackhardt_kite_graph_normalized(self): """Betweenness centrality: Krackhardt kite graph normalized""" G=nx.krackhardt_kite_graph() b_answer={0:0.023,1:0.023,2:0.000,3:0.102,4:0.000, 5:0.231,6:0.231,7:0.389,8:0.222,9:0.000} b=nx.betweenness_centrality(G, weight=None, normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_florentine_families_graph(self): """Betweenness centrality: Florentine families graph""" 
G=nx.florentine_families_graph() b_answer=\ {'Acciaiuoli': 0.000, 'Albizzi': 0.212, 'Barbadori': 0.093, 'Bischeri': 0.104, 'Castellani': 0.055, 'Ginori': 0.000, 'Guadagni': 0.255, 'Lamberteschi': 0.000, 'Medici': 0.522, 'Pazzi': 0.000, 'Peruzzi': 0.022, 'Ridolfi': 0.114, 'Salviati': 0.143, 'Strozzi': 0.103, 'Tornabuoni': 0.092} b=nx.betweenness_centrality(G, weight=None, normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_ladder_graph(self): """Betweenness centrality: Ladder graph""" G = nx.Graph() # ladder_graph(3) G.add_edges_from([(0,1), (0,2), (1,3), (2,3), (2,4), (4,5), (3,5)]) b_answer={0:1.667,1: 1.667,2: 6.667, 3: 6.667,4: 1.667,5: 1.667} for b in b_answer: b_answer[b]/=2.0 b=nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_disconnected_path(self): """Betweenness centrality: disconnected path""" G=nx.Graph() G.add_path([0,1,2]) G.add_path([3,4,5,6]) b_answer={0:0,1:1,2:0,3:0,4:2,5:2,6:0} b=nx.betweenness_centrality(G, weight=None, normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_disconnected_path_endpoints(self): """Betweenness centrality: disconnected path endpoints""" G=nx.Graph() G.add_path([0,1,2]) G.add_path([3,4,5,6]) b_answer={0:2,1:3,2:2,3:3,4:5,5:5,6:3} b=nx.betweenness_centrality(G, weight=None, normalized=False, endpoints=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_directed_path(self): """Betweenness centrality: directed path""" G=nx.DiGraph() G.add_path([0,1,2]) b=nx.betweenness_centrality(G, weight=None, normalized=False) b_answer={0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_directed_path_normalized(self): """Betweenness centrality: directed path normalized""" G=nx.DiGraph() G.add_path([0,1,2]) b=nx.betweenness_centrality(G, weight=None, normalized=True) b_answer={0: 0.0, 1: 0.5, 2: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) class TestWeightedBetweennessCentrality(object): def test_K5(self): """Weighted betweenness centrality: K5""" G=nx.complete_graph(5) b=nx.betweenness_centrality(G, weight='weight', normalized=False) b_answer={0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P3_normalized(self): """Weighted betweenness centrality: P3 normalized""" G=nx.path_graph(3) b=nx.betweenness_centrality(G, weight='weight', normalized=True) b_answer={0: 0.0, 1: 1.0, 2: 0.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P3(self): """Weighted betweenness centrality: P3""" G=nx.path_graph(3) b_answer={0: 0.0, 1: 1.0, 2: 0.0} b=nx.betweenness_centrality(G, weight='weight', normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_krackhardt_kite_graph(self): """Weighted betweenness centrality: Krackhardt kite graph""" G=nx.krackhardt_kite_graph() b_answer={0: 1.667,1: 1.667,2: 0.000,3: 7.333,4: 0.000, 5: 16.667,6: 16.667,7: 28.000,8: 16.000,9: 0.000} for b in b_answer: b_answer[b]/=2.0 b=nx.betweenness_centrality(G, weight='weight', normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_krackhardt_kite_graph_normalized(self): """Weighted betweenness centrality: Krackhardt kite graph normalized """ G=nx.krackhardt_kite_graph() b_answer={0:0.023,1:0.023,2:0.000,3:0.102,4:0.000, 5:0.231,6:0.231,7:0.389,8:0.222,9:0.000} b=nx.betweenness_centrality(G, weight='weight', normalized=True) for 
n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_florentine_families_graph(self): """Weighted betweenness centrality: Florentine families graph""" G=nx.florentine_families_graph() b_answer=\ {'Acciaiuoli': 0.000, 'Albizzi': 0.212, 'Barbadori': 0.093, 'Bischeri': 0.104, 'Castellani': 0.055, 'Ginori': 0.000, 'Guadagni': 0.255, 'Lamberteschi': 0.000, 'Medici': 0.522, 'Pazzi': 0.000, 'Peruzzi': 0.022, 'Ridolfi': 0.114, 'Salviati': 0.143, 'Strozzi': 0.103, 'Tornabuoni': 0.092} b=nx.betweenness_centrality(G, weight='weight', normalized=True) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_ladder_graph(self): """Weighted betweenness centrality: Ladder graph""" G = nx.Graph() # ladder_graph(3) G.add_edges_from([(0,1), (0,2), (1,3), (2,3), (2,4), (4,5), (3,5)]) b_answer={0:1.667,1: 1.667,2: 6.667, 3: 6.667,4: 1.667,5: 1.667} for b in b_answer: b_answer[b]/=2.0 b=nx.betweenness_centrality(G, weight='weight', normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_G(self): """Weighted betweenness centrality: G""" G = weighted_G() b_answer={0: 2.0, 1: 0.0, 2: 4.0, 3: 3.0, 4: 4.0, 5: 0.0} b=nx.betweenness_centrality(G, weight='weight', normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_G2(self): """Weighted betweenness centrality: G2""" G=nx.DiGraph() G.add_weighted_edges_from([('s','u',10) ,('s','x',5) , ('u','v',1) ,('u','x',2) , ('v','y',1) ,('x','u',3) , ('x','v',5) ,('x','y',2) , ('y','s',7) ,('y','v',6)]) b_answer={'y':5.0,'x':5.0,'s':4.0,'u':2.0,'v':2.0} b=nx.betweenness_centrality(G, weight='weight', normalized=False) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) class TestEdgeBetweennessCentrality(object): def test_K5(self): """Edge betweenness centrality: K5""" G=nx.complete_graph(5) b=nx.edge_betweenness_centrality(G, weight=None, normalized=False) b_answer=dict.fromkeys(G.edges(),1) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_normalized_K5(self): """Edge betweenness centrality: K5""" G=nx.complete_graph(5) b=nx.edge_betweenness_centrality(G, weight=None, normalized=True) b_answer=dict.fromkeys(G.edges(),1/10.0) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_C4(self): """Edge betweenness centrality: C4""" G=nx.cycle_graph(4) b=nx.edge_betweenness_centrality(G, weight=None, normalized=True) b_answer={(0, 1):2,(0, 3):2, (1, 2):2, (2, 3): 2} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]/6.0) def test_P4(self): """Edge betweenness centrality: P4""" G=nx.path_graph(4) b=nx.edge_betweenness_centrality(G, weight=None, normalized=False) b_answer={(0, 1):3,(1, 2):4, (2, 3):3} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_normalized_P4(self): """Edge betweenness centrality: P4""" G=nx.path_graph(4) b=nx.edge_betweenness_centrality(G, weight=None, normalized=True) b_answer={(0, 1):3,(1, 2):4, (2, 3):3} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]/6.0) def test_balanced_tree(self): """Edge betweenness centrality: balanced tree""" G=nx.balanced_tree(r=2,h=2) b=nx.edge_betweenness_centrality(G, weight=None, normalized=False) b_answer={(0, 1):12,(0, 2):12, (1, 3):6,(1, 4):6,(2, 5):6,(2,6):6} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) class TestWeightedEdgeBetweennessCentrality(object): def test_K5(self): """Edge betweenness centrality: K5""" G=nx.complete_graph(5) b=nx.edge_betweenness_centrality(G, weight='weight', 
normalized=False) b_answer=dict.fromkeys(G.edges(),1) for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_C4(self): """Edge betweenness centrality: C4""" G=nx.cycle_graph(4) b=nx.edge_betweenness_centrality(G, weight='weight', normalized=False) b_answer={(0, 1):2,(0, 3):2, (1, 2):2, (2, 3): 2} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_P4(self): """Edge betweenness centrality: P4""" G=nx.path_graph(4) b=nx.edge_betweenness_centrality(G, weight='weight', normalized=False) b_answer={(0, 1):3,(1, 2):4, (2, 3):3} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_balanced_tree(self): """Edge betweenness centrality: balanced tree""" G=nx.balanced_tree(r=2,h=2) b=nx.edge_betweenness_centrality(G, weight='weight', normalized=False) b_answer={(0, 1):12,(0, 2):12, (1, 3):6,(1, 4):6,(2, 5):6,(2,6):6} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_weighted_graph(self): eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3), (0, 4, 2), (1, 2, 4), (1, 3, 1), (1, 4, 3), (2, 4, 5), (3, 4, 4)] G = nx.Graph() G.add_weighted_edges_from(eList) b = nx.edge_betweenness_centrality(G, weight='weight', normalized=False) b_answer={(0, 1):0.0, (0, 2):1.0, (0, 3):2.0, (0, 4):1.0, (1, 2):2.0, (1, 3):3.5, (1, 4):1.5, (2, 4):1.0, (3, 4):0.5} for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]) def test_normalized_weighted_graph(self): eList = [(0, 1, 5), (0, 2, 4), (0, 3, 3), (0, 4, 2), (1, 2, 4), (1, 3, 1), (1, 4, 3), (2, 4, 5), (3, 4, 4)] G = nx.Graph() G.add_weighted_edges_from(eList) b = nx.edge_betweenness_centrality(G, weight='weight', normalized=True) b_answer={(0, 1):0.0, (0, 2):1.0, (0, 3):2.0, (0, 4):1.0, (1, 2):2.0, (1, 3):3.5, (1, 4):1.5, (2, 4):1.0, (3, 4):0.5} norm = len(G)*(len(G)-1)/2.0 for n in sorted(G.edges()): assert_almost_equal(b[n],b_answer[n]/norm) networkx-1.8.1/networkx/algorithms/centrality/tests/test_eigenvector_centrality.py0000664000175000017500000000753612177456333031043 0ustar aricaric00000000000000#!/usr/bin/env python import math from nose import SkipTest from nose.tools import * import networkx class TestEigenvectorCentrality(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np except ImportError: raise SkipTest('NumPy not available.') def test_K5(self): """Eigenvector centrality: K5""" G=networkx.complete_graph(5) b=networkx.eigenvector_centrality(G) v=math.sqrt(1/5.0) b_answer=dict.fromkeys(G,v) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) nstart = dict([(n,1) for n in G]) b=networkx.eigenvector_centrality(G,nstart=nstart) for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) b=networkx.eigenvector_centrality_numpy(G) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=3) def test_P3(self): """Eigenvector centrality: P3""" G=networkx.path_graph(3) b_answer={0: 0.5, 1: 0.7071, 2: 0.5} b=networkx.eigenvector_centrality_numpy(G) for n in sorted(G): assert_almost_equal(b[n],b_answer[n],places=4) @raises(networkx.NetworkXError) def test_maxiter(self): G=networkx.path_graph(3) b=networkx.eigenvector_centrality(G,max_iter=0) class TestEigenvectorCentralityDirected(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np except ImportError: raise SkipTest('NumPy not available.') def setUp(self): G=networkx.DiGraph() edges=[(1,2),(1,3),(2,4),(3,2),(3,5),(4,2),(4,5),(4,6),\ 
(5,6),(5,7),(5,8),(6,8),(7,1),(7,5),\ (7,8),(8,6),(8,7)] G.add_edges_from(edges,weight=2.0) self.G=G self.G.evc=[0.25368793, 0.19576478, 0.32817092, 0.40430835, 0.48199885, 0.15724483, 0.51346196, 0.32475403] H=networkx.DiGraph() edges=[(1,2),(1,3),(2,4),(3,2),(3,5),(4,2),(4,5),(4,6),\ (5,6),(5,7),(5,8),(6,8),(7,1),(7,5),\ (7,8),(8,6),(8,7)] G.add_edges_from(edges) self.H=G self.H.evc=[0.25368793, 0.19576478, 0.32817092, 0.40430835, 0.48199885, 0.15724483, 0.51346196, 0.32475403] def test_eigenvector_centrality_weighted(self): G=self.G p=networkx.eigenvector_centrality_numpy(G) for (a,b) in zip(list(p.values()),self.G.evc): assert_almost_equal(a,b) def test_eigenvector_centrality_unweighted(self): G=self.H p=networkx.eigenvector_centrality_numpy(G) for (a,b) in zip(list(p.values()),self.G.evc): assert_almost_equal(a,b) class TestEigenvectorCentralityExceptions(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np except ImportError: raise SkipTest('NumPy not available.') numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @raises(networkx.NetworkXException) def test_multigraph(self): e = networkx.eigenvector_centrality(networkx.MultiGraph()) @raises(networkx.NetworkXException) def test_multigraph_numpy(self): e = networkx.eigenvector_centrality_numpy(networkx.MultiGraph()) @raises(networkx.NetworkXException) def test_empty(self): e = networkx.eigenvector_centrality(networkx.Graph()) @raises(networkx.NetworkXException) def test_empty_numpy(self): e = networkx.eigenvector_centrality_numpy(networkx.Graph()) networkx-1.8.1/networkx/algorithms/centrality/tests/test_communicability.py0000664000175000017500000001245512177456333027457 0ustar aricaric00000000000000from collections import defaultdict from nose.tools import * from nose import SkipTest import networkx as nx from networkx.algorithms.centrality.communicability_alg import * class TestCommunicability: @classmethod def setupClass(cls): global numpy global scipy try: import numpy except ImportError: raise SkipTest('NumPy not available.') try: import scipy except ImportError: raise SkipTest('SciPy not available.') def test_communicability_centrality(self): answer={0: 1.5430806348152433, 1: 1.5430806348152433} result=communicability_centrality(nx.path_graph(2)) for k,v in result.items(): assert_almost_equal(answer[k],result[k],places=7) answer1={'1': 1.6445956054135658, 'Albert': 2.4368257358712189, 'Aric': 2.4368257358712193, 'Dan':3.1306328496328168, 'Franck': 2.3876142275231915} G1=nx.Graph([('Franck','Aric'),('Aric','Dan'),('Dan','Albert'), ('Albert','Franck'),('Dan','1'),('Franck','Albert')]) result1=communicability_centrality(G1) for k,v in result1.items(): assert_almost_equal(answer1[k],result1[k],places=7) result1=communicability_centrality_exp(G1) for k,v in result1.items(): assert_almost_equal(answer1[k],result1[k],places=7) def test_communicability_betweenness_centrality(self): answer={0: 0.07017447951484615, 1: 0.71565598701107991, 2: 0.71565598701107991, 3: 0.07017447951484615} result=communicability_betweenness_centrality(nx.path_graph(4)) for k,v in result.items(): assert_almost_equal(answer[k],result[k],places=7) answer1={'1': 0.060039074193949521, 'Albert': 0.315470761661372, 'Aric': 0.31547076166137211, 'Dan': 0.68297778678316201, 'Franck': 0.21977926617449497} G1=nx.Graph([('Franck','Aric'), ('Aric','Dan'),('Dan','Albert'),('Albert','Franck'), ('Dan','1'),('Franck','Albert')]) 
result1=communicability_betweenness_centrality(G1) for k,v in result1.items(): assert_almost_equal(answer1[k],result1[k],places=7) def test_communicability_betweenness_centrality_small(self): G = nx.Graph([(1,2)]) result=communicability_betweenness_centrality(G) assert_equal(result, {1:0,2:0}) def test_communicability(self): answer={0 :{0: 1.5430806348152435, 1: 1.1752011936438012 }, 1 :{0: 1.1752011936438012, 1: 1.5430806348152435 } } # answer={(0, 0): 1.5430806348152435, # (0, 1): 1.1752011936438012, # (1, 0): 1.1752011936438012, # (1, 1): 1.5430806348152435} result=communicability(nx.path_graph(2)) for k1,val in result.items(): for k2 in val: assert_almost_equal(answer[k1][k2],result[k1][k2],places=7) def test_communicability2(self): answer_orig ={('1', '1'): 1.6445956054135658, ('1', 'Albert'): 0.7430186221096251, ('1', 'Aric'): 0.7430186221096251, ('1', 'Dan'): 1.6208126320442937, ('1', 'Franck'): 0.42639707170035257, ('Albert', '1'): 0.7430186221096251, ('Albert', 'Albert'): 2.4368257358712189, ('Albert', 'Aric'): 1.4368257358712191, ('Albert', 'Dan'): 2.0472097037446453, ('Albert', 'Franck'): 1.8340111678944691, ('Aric', '1'): 0.7430186221096251, ('Aric', 'Albert'): 1.4368257358712191, ('Aric', 'Aric'): 2.4368257358712193, ('Aric', 'Dan'): 2.0472097037446457, ('Aric', 'Franck'): 1.8340111678944691, ('Dan', '1'): 1.6208126320442937, ('Dan', 'Albert'): 2.0472097037446453, ('Dan', 'Aric'): 2.0472097037446457, ('Dan', 'Dan'): 3.1306328496328168, ('Dan', 'Franck'): 1.4860372442192515, ('Franck', '1'): 0.42639707170035257, ('Franck', 'Albert'): 1.8340111678944691, ('Franck', 'Aric'): 1.8340111678944691, ('Franck', 'Dan'): 1.4860372442192515, ('Franck', 'Franck'): 2.3876142275231915} answer=defaultdict(dict) for (k1,k2),v in answer_orig.items(): answer[k1][k2]=v G1=nx.Graph([('Franck','Aric'),('Aric','Dan'),('Dan','Albert'), ('Albert','Franck'),('Dan','1'),('Franck','Albert')]) result=communicability(G1) for k1,val in result.items(): for k2 in val: assert_almost_equal(answer[k1][k2],result[k1][k2],places=7) result=communicability_exp(G1) for k1,val in result.items(): for k2 in val: assert_almost_equal(answer[k1][k2],result[k1][k2],places=7) def test_estrada_index(self): answer=1041.2470334195475 result=estrada_index(nx.karate_club_graph()) assert_almost_equal(answer,result,places=7) networkx-1.8.1/networkx/algorithms/centrality/tests/test_current_flow_closeness.py0000664000175000017500000000334512177456333031054 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx class TestFlowClosenessCentrality(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global np try: import numpy as np import scipy except ImportError: raise SkipTest('NumPy not available.') def test_K4(self): """Closeness centrality: K4""" G=networkx.complete_graph(4) b=networkx.current_flow_closeness_centrality(G,normalized=True) b_answer={0: 2.0, 1: 2.0, 2: 2.0, 3: 2.0} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P4_normalized(self): """Closeness centrality: P4 normalized""" G=networkx.path_graph(4) b=networkx.current_flow_closeness_centrality(G,normalized=True) b_answer={0: 1./2, 1: 3./4, 2: 3./4, 3:1./2} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) def test_P4(self): """Closeness centrality: P4""" G=networkx.path_graph(4) b=networkx.current_flow_closeness_centrality(G,normalized=False) b_answer={0: 1.0/6, 1: 1.0/4, 2: 1.0/4, 3:1.0/6} for n in sorted(G): 
assert_almost_equal(b[n],b_answer[n]) def test_star(self): """Closeness centrality: star """ G=networkx.Graph() G.add_star(['a','b','c','d']) b=networkx.current_flow_closeness_centrality(G,normalized=True) b_answer={'a': 1.0, 'b': 0.6, 'c': 0.6, 'd':0.6} for n in sorted(G): assert_almost_equal(b[n],b_answer[n]) class TestWeightedFlowClosenessCentrality(object): pass networkx-1.8.1/networkx/algorithms/centrality/betweenness.py0000664000175000017500000002463112177456333024407 0ustar aricaric00000000000000""" Betweenness centrality measures. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import heapq import networkx as nx import random __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['betweenness_centrality', 'edge_betweenness_centrality', 'edge_betweenness'] def betweenness_centrality(G, k=None, normalized=True, weight=None, endpoints=False, seed=None): r"""Compute the shortest-path betweenness centrality for nodes. Betweenness centrality of a node `v` is the sum of the fraction of all-pairs shortest paths that pass through `v`: .. math:: c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)} where `V` is the set of nodes, `\sigma(s, t)` is the number of shortest `(s, t)`-paths, and `\sigma(s, t|v)` is the number of those paths passing through some node `v` other than `s, t`. If `s = t`, `\sigma(s, t) = 1`, and if `v \in {s, t}`, `\sigma(s, t|v) = 0` [2]_. Parameters ---------- G : graph A NetworkX graph k : int, optional (default=None) If k is not None use k node samples to estimate betweenness. The value of k <= n where n is the number of nodes in the graph. Higher values give better approximation. normalized : bool, optional If True the betweenness values are normalized by `2/((n-1)(n-2))` for graphs, and `1/((n-1)(n-2))` for directed graphs where `n` is the number of nodes in G. weight : None or string, optional If None, all edge weights are considered equal. Otherwise holds the name of the edge attribute used as weight. endpoints : bool, optional If True include the endpoints in the shortest path counts. Returns ------- nodes : dictionary Dictionary of nodes with betweenness centrality as the value. See Also -------- edge_betweenness_centrality load_centrality Notes ----- The algorithm is from Ulrik Brandes [1]_. See [2]_ for details on algorithms for variations and related metrics. For approximate betweenness calculations set k=#samples to use k nodes ("pivots") to estimate the betweenness values. For an estimate of the number of pivots needed see [3]_. For weighted graphs the edge weights must be greater than zero. Zero edge weights can produce an infinite number of equal length paths between pairs of nodes. References ---------- .. [1] A Faster Algorithm for Betweenness Centrality. Ulrik Brandes, Journal of Mathematical Sociology 25(2):163-177, 2001. http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness Centrality and their Generic Computation. Social Networks 30(2):136-145, 2008. http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf .. [3] Ulrik Brandes and Christian Pich: Centrality Estimation in Large Networks. International Journal of Bifurcation and Chaos 17(7):2303-2318, 2007. 
http://www.inf.uni-konstanz.de/algo/publications/bp-celn-06.pdf """ betweenness=dict.fromkeys(G,0.0) # b[v]=0 for v in G if k is None: nodes = G else: random.seed(seed) nodes = random.sample(G.nodes(), k) for s in nodes: # single source shortest paths if weight is None: # use BFS S,P,sigma=_single_source_shortest_path_basic(G,s) else: # use Dijkstra's algorithm S,P,sigma=_single_source_dijkstra_path_basic(G,s,weight) # accumulation if endpoints: betweenness=_accumulate_endpoints(betweenness,S,P,sigma,s) else: betweenness=_accumulate_basic(betweenness,S,P,sigma,s) # rescaling betweenness=_rescale(betweenness, len(G), normalized=normalized, directed=G.is_directed(), k=k) return betweenness def edge_betweenness_centrality(G,normalized=True,weight=None): r"""Compute betweenness centrality for edges. Betweenness centrality of an edge `e` is the sum of the fraction of all-pairs shortest paths that pass through `e`: .. math:: c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|e)}{\sigma(s, t)} where `V` is the set of nodes,`\sigma(s, t)` is the number of shortest `(s, t)`-paths, and `\sigma(s, t|e)` is the number of those paths passing through edge `e` [2]_. Parameters ---------- G : graph A NetworkX graph normalized : bool, optional If True the betweenness values are normalized by `2/(n(n-1))` for graphs, and `1/(n(n-1))` for directed graphs where `n` is the number of nodes in G. weight : None or string, optional If None, all edge weights are considered equal. Otherwise holds the name of the edge attribute used as weight. Returns ------- edges : dictionary Dictionary of edges with betweenness centrality as the value. See Also -------- betweenness_centrality edge_load Notes ----- The algorithm is from Ulrik Brandes [1]_. For weighted graphs the edge weights must be greater than zero. Zero edge weights can produce an infinite number of equal length paths between pairs of nodes. References ---------- .. [1] A Faster Algorithm for Betweenness Centrality. Ulrik Brandes, Journal of Mathematical Sociology 25(2):163-177, 2001. http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf .. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness Centrality and their Generic Computation. Social Networks 30(2):136-145, 2008. 
http://www.inf.uni-konstanz.de/algo/publications/b-vspbc-08.pdf """ betweenness=dict.fromkeys(G,0.0) # b[v]=0 for v in G # b[e]=0 for e in G.edges() betweenness.update(dict.fromkeys(G.edges(),0.0)) for s in G: # single source shortest paths if weight is None: # use BFS S,P,sigma=_single_source_shortest_path_basic(G,s) else: # use Dijkstra's algorithm S,P,sigma=_single_source_dijkstra_path_basic(G,s,weight) # accumulation betweenness=_accumulate_edges(betweenness,S,P,sigma,s) # rescaling for n in G: # remove nodes to only return edges del betweenness[n] betweenness=_rescale_e(betweenness, len(G), normalized=normalized, directed=G.is_directed()) return betweenness # obsolete name def edge_betweenness(G,normalized=True,weight=None): return edge_betweenness_centrality(G,normalized,weight) # helpers for betweenness centrality def _single_source_shortest_path_basic(G,s): S=[] P={} for v in G: P[v]=[] sigma=dict.fromkeys(G,0.0) # sigma[v]=0 for v in G D={} sigma[s]=1.0 D[s]=0 Q=[s] while Q: # use BFS to find shortest paths v=Q.pop(0) S.append(v) Dv=D[v] sigmav=sigma[v] for w in G[v]: if w not in D: Q.append(w) D[w]=Dv+1 if D[w]==Dv+1: # this is a shortest path, count paths sigma[w] += sigmav P[w].append(v) # predecessors return S,P,sigma def _single_source_dijkstra_path_basic(G,s,weight='weight'): # modified from Eppstein S=[] P={} for v in G: P[v]=[] sigma=dict.fromkeys(G,0.0) # sigma[v]=0 for v in G D={} sigma[s]=1.0 push=heapq.heappush pop=heapq.heappop seen = {s:0} Q=[] # use Q as heap with (distance,node id) tuples push(Q,(0,s,s)) while Q: (dist,pred,v)=pop(Q) if v in D: continue # already searched this node. sigma[v] += sigma[pred] # count paths S.append(v) D[v] = dist for w,edgedata in G[v].items(): vw_dist = dist + edgedata.get(weight,1) if w not in D and (w not in seen or vw_dist < seen[w]): seen[w] = vw_dist push(Q,(vw_dist,v,w)) sigma[w]=0.0 P[w]=[v] elif vw_dist==seen[w]: # handle equal paths sigma[w] += sigma[v] P[w].append(v) return S,P,sigma def _accumulate_basic(betweenness,S,P,sigma,s): delta=dict.fromkeys(S,0) while S: w=S.pop() coeff=(1.0+delta[w])/sigma[w] for v in P[w]: delta[v] += sigma[v]*coeff if w != s: betweenness[w]+=delta[w] return betweenness def _accumulate_endpoints(betweenness,S,P,sigma,s): betweenness[s]+=len(S)-1 delta=dict.fromkeys(S,0) while S: w=S.pop() coeff=(1.0+delta[w])/sigma[w] for v in P[w]: delta[v] += sigma[v]*coeff if w != s: betweenness[w] += delta[w]+1 return betweenness def _accumulate_edges(betweenness,S,P,sigma,s): delta=dict.fromkeys(S,0) while S: w=S.pop() coeff=(1.0+delta[w])/sigma[w] for v in P[w]: c=sigma[v]*coeff if (v,w) not in betweenness: betweenness[(w,v)]+=c else: betweenness[(v,w)]+=c delta[v]+=c if w != s: betweenness[w]+=delta[w] return betweenness def _rescale(betweenness,n,normalized,directed=False,k=None): if normalized is True: if n <=2: scale=None # no normalization b=0 for all nodes else: scale=1.0/((n-1)*(n-2)) else: # rescale by 2 for undirected graphs if not directed: scale=1.0/2.0 else: scale=None if scale is not None: if k is not None: scale=scale*n/k for v in betweenness: betweenness[v] *= scale return betweenness def _rescale_e(betweenness,n,normalized,directed=False): if normalized is True: if n <=1: scale=None # no normalization b=0 for all nodes else: scale=1.0/(n*(n-1)) else: # rescale by 2 for undirected graphs if not directed: scale=1.0/2.0 else: scale=None if scale is not None: for v in betweenness: betweenness[v] *= scale return betweenness 
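# ----------------------------------------------------------------------
# Editor's usage sketch (hedged; not part of the original module): a
# minimal demonstration of the two public entry points defined above,
# including the k-sample approximation described in the
# betweenness_centrality docstring.  Only the public API of this module
# and a standard networkx generator are assumed.
if __name__ == '__main__':
    import networkx as nx

    G = nx.krackhardt_kite_graph()
    exact = nx.betweenness_centrality(G, normalized=True)
    approx = nx.betweenness_centrality(G, k=5, normalized=True, seed=42)
    edges = nx.edge_betweenness_centrality(G, normalized=True)
    # The k-sample values are estimates; they approach `exact` as k -> n.
    print(sorted(exact, key=exact.get, reverse=True)[:3])   # most central nodes
    print(max(edges, key=edges.get))                        # most central edge
# ----------------------------------------------------------------------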
networkx-1.8.1/networkx/algorithms/centrality/current_flow_betweenness_subset.py0000664000175000017500000002251112177456333030560 0ustar aricaric00000000000000""" Current-flow betweenness centrality measures for subsets of nodes. """ # Copyright (C) 2010-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['current_flow_betweenness_centrality_subset', 'edge_current_flow_betweenness_centrality_subset'] import itertools import networkx as nx from networkx.algorithms.centrality.flow_matrix import * def current_flow_betweenness_centrality_subset(G,sources,targets, normalized=True, weight='weight', dtype=float, solver='lu'): r"""Compute current-flow betweenness centrality for subsets of nodes. Current-flow betweenness centrality uses an electrical current model for information spreading in contrast to betweenness centrality which uses shortest paths. Current-flow betweenness centrality is also known as random-walk betweenness centrality [2]_. Parameters ---------- G : graph A NetworkX graph sources: list of nodes Nodes to use as sources for current targets: list of nodes Nodes to use as sinks for current normalized : bool, optional (default=True) If True the betweenness values are normalized by b=b/(n-1)(n-2) where n is the number of nodes in G. weight : string or None, optional (default='weight') Key for edge data used as the edge weight. If None, then use 1 as each edge weight. dtype: data type (float) Default data type for internal matrices. Set to np.float32 for lower memory consumption. solver: string (default='lu') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). Returns ------- nodes : dictionary Dictionary of nodes with betweenness centrality as the value. See Also -------- approximate_current_flow_betweenness_centrality betweenness_centrality edge_betweenness_centrality edge_current_flow_betweenness_centrality Notes ----- Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)` time [1]_, where `I(n-1)` is the time needed to compute the inverse Laplacian. For a full matrix this is `O(n^3)` but using sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the Laplacian matrix condition number. The space required is `O(nw) where `w` is the width of the sparse Laplacian matrix. Worse case is `w=n` for `O(n^2)`. If the edges have a 'weight' attribute they will be used as weights in this algorithm. Unspecified weights are set to 1. References ---------- .. [1] Centrality Measures Based on Current Flow. Ulrik Brandes and Daniel Fleischer, Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). LNCS 3404, pp. 533-544. Springer-Verlag, 2005. http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf .. [2] A measure of betweenness centrality based on random walks, M. E. J. Newman, Social Networks 27, 39-54 (2005). 
""" from networkx.utils import reverse_cuthill_mckee_ordering try: import numpy as np except ImportError: raise ImportError('current_flow_betweenness_centrality requires NumPy ', 'http://scipy.org/') try: import scipy except ImportError: raise ImportError('current_flow_betweenness_centrality requires SciPy ', 'http://scipy.org/') if G.is_directed(): raise nx.NetworkXError('current_flow_betweenness_centrality() ', 'not defined for digraphs.') if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to mapping=dict(zip(ordering,range(n))) H = nx.relabel_nodes(G,mapping) betweenness = dict.fromkeys(H,0.0) # b[v]=0 for v in H for row,(s,t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i=mapping[ss] for tt in targets: j=mapping[tt] betweenness[s]+=0.5*np.abs(row[i]-row[j]) betweenness[t]+=0.5*np.abs(row[i]-row[j]) if normalized: nb=(n-1.0)*(n-2.0) # normalization factor else: nb=2.0 for v in H: betweenness[v]=betweenness[v]/nb+1.0/(2-n) return dict((ordering[k],v) for k,v in betweenness.items()) def edge_current_flow_betweenness_centrality_subset(G, sources, targets, normalized=True, weight='weight', dtype=float, solver='lu'): """Compute current-flow betweenness centrality for edges using subsets of nodes. Current-flow betweenness centrality uses an electrical current model for information spreading in contrast to betweenness centrality which uses shortest paths. Current-flow betweenness centrality is also known as random-walk betweenness centrality [2]_. Parameters ---------- G : graph A NetworkX graph sources: list of nodes Nodes to use as sources for current targets: list of nodes Nodes to use as sinks for current normalized : bool, optional (default=True) If True the betweenness values are normalized by b=b/(n-1)(n-2) where n is the number of nodes in G. weight : string or None, optional (default='weight') Key for edge data used as the edge weight. If None, then use 1 as each edge weight. dtype: data type (float) Default data type for internal matrices. Set to np.float32 for lower memory consumption. solver: string (default='lu') Type of linear solver to use for computing the flow matrix. Options are "full" (uses most memory), "lu" (recommended), and "cg" (uses least memory). Returns ------- nodes : dictionary Dictionary of edge tuples with betweenness centrality as the value. See Also -------- betweenness_centrality edge_betweenness_centrality current_flow_betweenness_centrality Notes ----- Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)` time [1]_, where `I(n-1)` is the time needed to compute the inverse Laplacian. For a full matrix this is `O(n^3)` but using sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the Laplacian matrix condition number. The space required is `O(nw) where `w` is the width of the sparse Laplacian matrix. Worse case is `w=n` for `O(n^2)`. If the edges have a 'weight' attribute they will be used as weights in this algorithm. Unspecified weights are set to 1. References ---------- .. [1] Centrality Measures Based on Current Flow. Ulrik Brandes and Daniel Fleischer, Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). LNCS 3404, pp. 533-544. Springer-Verlag, 2005. http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf .. 
[2] A measure of betweenness centrality based on random walks, M. E. J. Newman, Social Networks 27, 39-54 (2005). """ from networkx.utils import reverse_cuthill_mckee_ordering try: import numpy as np except ImportError: raise ImportError('current_flow_betweenness_centrality requires NumPy ', 'http://scipy.org/') try: import scipy except ImportError: raise ImportError('current_flow_betweenness_centrality requires SciPy ', 'http://scipy.org/') if G.is_directed(): raise nx.NetworkXError('edge_current_flow_betweenness_centrality ', 'not defined for digraphs.') if not nx.is_connected(G): raise nx.NetworkXError("Graph not connected.") n = G.number_of_nodes() ordering = list(reverse_cuthill_mckee_ordering(G)) # make a copy with integer labels according to rcm ordering # this could be done without a copy if we really wanted to mapping=dict(zip(ordering,range(n))) H = nx.relabel_nodes(G,mapping) betweenness=(dict.fromkeys(H.edges(),0.0)) if normalized: nb=(n-1.0)*(n-2.0) # normalization factor else: nb=2.0 for row,(e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver): for ss in sources: i=mapping[ss] for tt in targets: j=mapping[tt] betweenness[e]+=0.5*np.abs(row[i]-row[j]) betweenness[e]/=nb return dict(((ordering[s],ordering[t]),v) for (s,t),v in betweenness.items()) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy import scipy except: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/algorithms/centrality/degree_alg.py0000664000175000017500000000671712177456333024150 0ustar aricaric00000000000000""" Degree centrality measures. """ # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Sasha Gutfraind (ag362@cornell.edu)']) __all__ = ['degree_centrality', 'in_degree_centrality', 'out_degree_centrality'] import networkx as nx def degree_centrality(G): """Compute the degree centrality for nodes. The degree centrality for a node v is the fraction of nodes it is connected to. Parameters ---------- G : graph A networkx graph Returns ------- nodes : dictionary Dictionary of nodes with degree centrality as the value. See Also -------- betweenness_centrality, load_centrality, eigenvector_centrality Notes ----- The degree centrality values are normalized by dividing by the maximum possible degree in a simple graph n-1 where n is the number of nodes in G. For multigraphs or graphs with self loops the maximum degree might be higher than n-1 and values of degree centrality greater than 1 are possible. """ centrality={} s=1.0/(len(G)-1.0) centrality=dict((n,d*s) for n,d in G.degree_iter()) return centrality def in_degree_centrality(G): """Compute the in-degree centrality for nodes. The in-degree centrality for a node v is the fraction of nodes its incoming edges are connected to. Parameters ---------- G : graph A NetworkX graph Returns ------- nodes : dictionary Dictionary of nodes with in-degree centrality as values. See Also -------- degree_centrality, out_degree_centrality Notes ----- The degree centrality values are normalized by dividing by the maximum possible degree in a simple graph n-1 where n is the number of nodes in G. For multigraphs or graphs with self loops the maximum degree might be higher than n-1 and values of degree centrality greater than 1 are possible. 
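Examples
--------
A short illustrative doctest (a sketch, assuming a three-node directed
cycle, where every node has in-degree 1 and n - 1 = 2):

>>> import networkx as nx
>>> G = nx.DiGraph([(1, 2), (2, 3), (3, 1)])
>>> sorted(nx.in_degree_centrality(G).items())
[(1, 0.5), (2, 0.5), (3, 0.5)]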
""" if not G.is_directed(): raise nx.NetworkXError(\ "in_degree_centrality() not defined for undirected graphs.") centrality={} s=1.0/(len(G)-1.0) centrality=dict((n,d*s) for n,d in G.in_degree_iter()) return centrality def out_degree_centrality(G): """Compute the out-degree centrality for nodes. The out-degree centrality for a node v is the fraction of nodes its outgoing edges are connected to. Parameters ---------- G : graph A NetworkX graph Returns ------- nodes : dictionary Dictionary of nodes with out-degree centrality as values. See Also -------- degree_centrality, in_degree_centrality Notes ----- The degree centrality values are normalized by dividing by the maximum possible degree in a simple graph n-1 where n is the number of nodes in G. For multigraphs or graphs with self loops the maximum degree might be higher than n-1 and values of degree centrality greater than 1 are possible. """ if not G.is_directed(): raise nx.NetworkXError(\ "out_degree_centrality() not defined for undirected graphs.") centrality={} s=1.0/(len(G)-1.0) centrality=dict((n,d*s) for n,d in G.out_degree_iter()) return centrality networkx-1.8.1/networkx/algorithms/flow/0000775000175000017500000000000012177457361020300 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/flow/__init__.py0000664000175000017500000000013712177456333022410 0ustar aricaric00000000000000from networkx.algorithms.flow.maxflow import * from networkx.algorithms.flow.mincost import * networkx-1.8.1/networkx/algorithms/flow/tests/0000775000175000017500000000000012177457361021442 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/flow/tests/test_maxflow.py0000664000175000017500000002205012177456333024525 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Max flow algorithm test suite. Run with nose: nosetests -v test_max_flow.py """ __author__ = """Loïc Séguin-C. """ # Copyright (C) 2010 Loïc Séguin-C. # All rights reserved. # BSD license. 
import networkx as nx from nose.tools import * def compare_flows(G, s, t, solnFlows, solnValue): flowValue, flowDict = nx.ford_fulkerson(G, s, t) assert_equal(flowValue, solnValue) assert_equal(flowDict, solnFlows) assert_equal(nx.min_cut(G, s, t), solnValue) assert_equal(nx.max_flow(G, s, t), solnValue) assert_equal(nx.ford_fulkerson_flow(G, s, t), solnFlows) class TestMaxflow: def test_graph1(self): # Trivial undirected graph G = nx.Graph() G.add_edge(1,2, capacity = 1.0) solnFlows = {1: {2: 1.0}, 2: {1: 1.0}} compare_flows(G, 1, 2, solnFlows, 1.0) def test_graph2(self): # A more complex undirected graph # adapted from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow G = nx.Graph() G.add_edge('x','a', capacity = 3.0) G.add_edge('x','b', capacity = 1.0) G.add_edge('a','c', capacity = 3.0) G.add_edge('b','c', capacity = 5.0) G.add_edge('b','d', capacity = 4.0) G.add_edge('d','e', capacity = 2.0) G.add_edge('c','y', capacity = 2.0) G.add_edge('e','y', capacity = 3.0) H = {'x': {'a': 3, 'b': 1}, 'a': {'c': 3, 'x': 3}, 'b': {'c': 1, 'd': 2, 'x': 1}, 'c': {'a': 3, 'b': 1, 'y': 2}, 'd': {'b': 2, 'e': 2}, 'e': {'d': 2, 'y': 2}, 'y': {'c': 2, 'e': 2}} compare_flows(G, 'x', 'y', H, 4.0) def test_digraph1(self): # The classic directed graph example G = nx.DiGraph() G.add_edge('a','b', capacity = 1000.0) G.add_edge('a','c', capacity = 1000.0) G.add_edge('b','c', capacity = 1.0) G.add_edge('b','d', capacity = 1000.0) G.add_edge('c','d', capacity = 1000.0) H = {'a': {'b': 1000.0, 'c': 1000.0}, 'b': {'c': 0, 'd': 1000.0}, 'c': {'d': 1000.0}, 'd': {}} compare_flows(G, 'a', 'd', H, 2000.0) # An example in which some edges end up with zero flow. G = nx.DiGraph() G.add_edge('s', 'b', capacity = 2) G.add_edge('s', 'c', capacity = 1) G.add_edge('c', 'd', capacity = 1) G.add_edge('d', 'a', capacity = 1) G.add_edge('b', 'a', capacity = 2) G.add_edge('a', 't', capacity = 2) H = {'s': {'b': 2, 'c': 0}, 'c': {'d': 0}, 'd': {'a': 0}, 'b': {'a': 2}, 'a': {'t': 2}, 't': {}} compare_flows(G, 's', 't', H, 2) def test_digraph2(self): # A directed graph example from Cormen et al. G = nx.DiGraph() G.add_edge('s','v1', capacity = 16.0) G.add_edge('s','v2', capacity = 13.0) G.add_edge('v1','v2', capacity = 10.0) G.add_edge('v2','v1', capacity = 4.0) G.add_edge('v1','v3', capacity = 12.0) G.add_edge('v3','v2', capacity = 9.0) G.add_edge('v2','v4', capacity = 14.0) G.add_edge('v4','v3', capacity = 7.0) G.add_edge('v3','t', capacity = 20.0) G.add_edge('v4','t', capacity = 4.0) H = {'s': {'v1': 12.0, 'v2': 11.0}, 'v2': {'v1': 0, 'v4': 11.0}, 'v1': {'v2': 0, 'v3': 12.0}, 'v3': {'v2': 0, 't': 19.0}, 'v4': {'v3': 7.0, 't': 4.0}, 't': {}} compare_flows(G, 's', 't', H, 23.0) def test_digraph3(self): # A more complex directed graph # from www.topcoder.com/tc?module=Statc&d1=tutorials&d2=maxFlow G = nx.DiGraph() G.add_edge('x','a', capacity = 3.0) G.add_edge('x','b', capacity = 1.0) G.add_edge('a','c', capacity = 3.0) G.add_edge('b','c', capacity = 5.0) G.add_edge('b','d', capacity = 4.0) G.add_edge('d','e', capacity = 2.0) G.add_edge('c','y', capacity = 2.0) G.add_edge('e','y', capacity = 3.0) H = {'x': {'a': 2.0, 'b': 1.0}, 'a': {'c': 2.0}, 'b': {'c': 0, 'd': 1.0}, 'c': {'y': 2.0}, 'd': {'e': 1.0}, 'e': {'y': 1.0}, 'y': {}} compare_flows(G, 'x', 'y', H, 3.0) def test_optional_capacity(self): # Test optional capacity parameter. 
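# (That is: each max-flow entry point accepts a `capacity` keyword naming the
# edge attribute that holds the capacities, so passing capacity='spam' below
# makes the solvers read G[u][v]['spam'] instead of the default 'capacity'.)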
G = nx.DiGraph() G.add_edge('x','a', spam = 3.0) G.add_edge('x','b', spam = 1.0) G.add_edge('a','c', spam = 3.0) G.add_edge('b','c', spam = 5.0) G.add_edge('b','d', spam = 4.0) G.add_edge('d','e', spam = 2.0) G.add_edge('c','y', spam = 2.0) G.add_edge('e','y', spam = 3.0) solnFlows = {'x': {'a': 2.0, 'b': 1.0}, 'a': {'c': 2.0}, 'b': {'c': 0, 'd': 1.0}, 'c': {'y': 2.0}, 'd': {'e': 1.0}, 'e': {'y': 1.0}, 'y': {}} solnValue = 3.0 s = 'x' t = 'y' flowValue, flowDict = nx.ford_fulkerson(G, s, t, capacity = 'spam') assert_equal(flowValue, solnValue) assert_equal(flowDict, solnFlows) assert_equal(nx.min_cut(G, s, t, capacity = 'spam'), solnValue) assert_equal(nx.max_flow(G, s, t, capacity = 'spam'), solnValue) assert_equal(nx.ford_fulkerson_flow(G, s, t, capacity = 'spam'), solnFlows) def test_digraph_infcap_edges(self): # DiGraph with infinite capacity edges G = nx.DiGraph() G.add_edge('s', 'a') G.add_edge('s', 'b', capacity = 30) G.add_edge('a', 'c', capacity = 25) G.add_edge('b', 'c', capacity = 12) G.add_edge('a', 't', capacity = 60) G.add_edge('c', 't') H = {'s': {'a': 85, 'b': 12}, 'a': {'c': 25, 't': 60}, 'b': {'c': 12}, 'c': {'t': 37}, 't': {}} compare_flows(G, 's', 't', H, 97) # DiGraph with infinite capacity digon G = nx.DiGraph() G.add_edge('s', 'a', capacity = 85) G.add_edge('s', 'b', capacity = 30) G.add_edge('a', 'c') G.add_edge('c', 'a') G.add_edge('b', 'c', capacity = 12) G.add_edge('a', 't', capacity = 60) G.add_edge('c', 't', capacity = 37) H = {'s': {'a': 85, 'b': 12}, 'a': {'c': 25, 't': 60}, 'c': {'a': 0, 't': 37}, 'b': {'c': 12}, 't': {}} compare_flows(G, 's', 't', H, 97) def test_digraph_infcap_path(self): # Graph with infinite capacity (s, t)-path G = nx.DiGraph() G.add_edge('s', 'a') G.add_edge('s', 'b', capacity = 30) G.add_edge('a', 'c') G.add_edge('b', 'c', capacity = 12) G.add_edge('a', 't', capacity = 60) G.add_edge('c', 't') assert_raises(nx.NetworkXUnbounded, nx.ford_fulkerson, G, 's', 't') assert_raises(nx.NetworkXUnbounded, nx.max_flow, G, 's', 't') assert_raises(nx.NetworkXUnbounded, nx.ford_fulkerson_flow, G, 's', 't') assert_raises(nx.NetworkXUnbounded, nx.min_cut, G, 's', 't') def test_graph_infcap_edges(self): # Undirected graph with infinite capacity edges G = nx.Graph() G.add_edge('s', 'a') G.add_edge('s', 'b', capacity = 30) G.add_edge('a', 'c', capacity = 25) G.add_edge('b', 'c', capacity = 12) G.add_edge('a', 't', capacity = 60) G.add_edge('c', 't') H = {'s': {'a': 85, 'b': 12}, 'a': {'c': 25, 's': 85, 't': 60}, 'b': {'c': 12, 's': 12}, 'c': {'a': 25, 'b': 12, 't': 37}, 't': {'a': 60, 'c': 37}} compare_flows(G, 's', 't', H, 97) def test_digraph4(self): # From ticket #429 by mfrasca. 
G = nx.DiGraph() G.add_edge('s', 'a', capacity = 2) G.add_edge('s', 'b', capacity = 2) G.add_edge('a', 'b', capacity = 5) G.add_edge('a', 't', capacity = 1) G.add_edge('b', 'a', capacity = 1) G.add_edge('b', 't', capacity = 3) flowSoln = {'a': {'b': 1, 't': 1}, 'b': {'a': 0, 't': 3}, 's': {'a': 2, 'b': 2}, 't': {}} compare_flows(G, 's', 't', flowSoln, 4) def test_disconnected(self): G = nx.Graph() G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity') G.remove_node(1) assert_equal(nx.max_flow(G,0,3),0) def test_source_target_not_in_graph(self): G = nx.Graph() G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity') G.remove_node(0) assert_raises(nx.NetworkXError,nx.max_flow,G,0,3) G.add_weighted_edges_from([(0,1,1),(1,2,1),(2,3,1)],weight='capacity') G.remove_node(3) assert_raises(nx.NetworkXError,nx.max_flow,G,0,3) networkx-1.8.1/networkx/algorithms/flow/tests/test_mincost.py0000664000175000017500000002612512177456333024533 0ustar aricaric00000000000000# -*- coding: utf-8 -*- import networkx as nx from nose.tools import assert_equal, assert_raises class TestNetworkSimplex: def test_simple_digraph(self): G = nx.DiGraph() G.add_node('a', demand = -5) G.add_node('d', demand = 5) G.add_edge('a', 'b', weight = 3, capacity = 4) G.add_edge('a', 'c', weight = 6, capacity = 10) G.add_edge('b', 'd', weight = 1, capacity = 9) G.add_edge('c', 'd', weight = 2, capacity = 5) flowCost, H = nx.network_simplex(G) soln = {'a': {'b': 4, 'c': 1}, 'b': {'d': 4}, 'c': {'d': 1}, 'd': {}} assert_equal(flowCost, 24) assert_equal(nx.min_cost_flow_cost(G), 24) assert_equal(H, soln) assert_equal(nx.min_cost_flow(G), soln) assert_equal(nx.cost_of_flow(G, H), 24) def test_negcycle_infcap(self): G = nx.DiGraph() G.add_node('s', demand = -5) G.add_node('t', demand = 5) G.add_edge('s', 'a', weight = 1, capacity = 3) G.add_edge('a', 'b', weight = 3) G.add_edge('c', 'a', weight = -6) G.add_edge('b', 'd', weight = 1) G.add_edge('d', 'c', weight = -2) G.add_edge('d', 't', weight = 1, capacity = 3) assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G) def test_sum_demands_not_zero(self): G = nx.DiGraph() G.add_node('s', demand = -5) G.add_node('t', demand = 4) G.add_edge('s', 'a', weight = 1, capacity = 3) G.add_edge('a', 'b', weight = 3) G.add_edge('a', 'c', weight = -6) G.add_edge('b', 'd', weight = 1) G.add_edge('c', 'd', weight = -2) G.add_edge('d', 't', weight = 1, capacity = 3) assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G) def test_no_flow_satisfying_demands(self): G = nx.DiGraph() G.add_node('s', demand = -5) G.add_node('t', demand = 5) G.add_edge('s', 'a', weight = 1, capacity = 3) G.add_edge('a', 'b', weight = 3) G.add_edge('a', 'c', weight = -6) G.add_edge('b', 'd', weight = 1) G.add_edge('c', 'd', weight = -2) G.add_edge('d', 't', weight = 1, capacity = 3) assert_raises(nx.NetworkXUnfeasible, nx.network_simplex, G) def test_transshipment(self): G = nx.DiGraph() G.add_node('a', demand = 1) G.add_node('b', demand = -2) G.add_node('c', demand = -2) G.add_node('d', demand = 3) G.add_node('e', demand = -4) G.add_node('f', demand = -4) G.add_node('g', demand = 3) G.add_node('h', demand = 2) G.add_node('r', demand = 3) G.add_edge('a', 'c', weight = 3) G.add_edge('r', 'a', weight = 2) G.add_edge('b', 'a', weight = 9) G.add_edge('r', 'c', weight = 0) G.add_edge('b', 'r', weight = -6) G.add_edge('c', 'd', weight = 5) G.add_edge('e', 'r', weight = 4) G.add_edge('e', 'f', weight = 3) G.add_edge('h', 'b', weight = 4) G.add_edge('f', 'd', weight = 7) G.add_edge('f', 'h', weight = 
12) G.add_edge('g', 'd', weight = 12) G.add_edge('f', 'g', weight = -1) G.add_edge('h', 'g', weight = -10) flowCost, H = nx.network_simplex(G) soln = {'a': {'c': 0}, 'b': {'a': 0, 'r': 2}, 'c': {'d': 3}, 'd': {}, 'e': {'r': 3, 'f': 1}, 'f': {'d': 0, 'g': 3, 'h': 2}, 'g': {'d': 0}, 'h': {'b': 0, 'g': 0}, 'r': {'a': 1, 'c': 1}} assert_equal(flowCost, 41) assert_equal(nx.min_cost_flow_cost(G), 41) assert_equal(H, soln) assert_equal(nx.min_cost_flow(G), soln) assert_equal(nx.cost_of_flow(G, H), 41) def test_max_flow_min_cost(self): G = nx.DiGraph() G.add_edge('s', 'a', bandwidth = 6) G.add_edge('s', 'c', bandwidth = 10, cost = 10) G.add_edge('a', 'b', cost = 6) G.add_edge('b', 'd', bandwidth = 8, cost = 7) G.add_edge('c', 'd', cost = 10) G.add_edge('d', 't', bandwidth = 5, cost = 5) soln = {'s': {'a': 5, 'c': 0}, 'a': {'b': 5}, 'b': {'d': 5}, 'c': {'d': 0}, 'd': {'t': 5}, 't': {}} flow = nx.max_flow_min_cost(G, 's', 't', capacity = 'bandwidth', weight = 'cost') assert_equal(flow, soln) assert_equal(nx.cost_of_flow(G, flow, weight = 'cost'), 90) def test_digraph1(self): # From Bradley, S. P., Hax, A. C. and Magnanti, T. L. Applied # Mathematical Programming. Addison-Wesley, 1977. G = nx.DiGraph() G.add_node(1, demand = -20) G.add_node(4, demand = 5) G.add_node(5, demand = 15) G.add_edges_from([(1, 2, {'capacity': 15, 'weight': 4}), (1, 3, {'capacity': 8, 'weight': 4}), (2, 3, {'weight': 2}), (2, 4, {'capacity': 4, 'weight': 2}), (2, 5, {'capacity': 10, 'weight': 6}), (3, 4, {'capacity': 15, 'weight': 1}), (3, 5, {'capacity': 5, 'weight': 3}), (4, 5, {'weight': 2}), (5, 3, {'capacity': 4, 'weight': 1})]) flowCost, H = nx.network_simplex(G) soln = {1: {2: 12, 3: 8}, 2: {3: 8, 4: 4, 5: 0}, 3: {4: 11, 5: 5}, 4: {5: 10}, 5: {3: 0}} assert_equal(flowCost, 150) assert_equal(nx.min_cost_flow_cost(G), 150) assert_equal(H, soln) assert_equal(nx.min_cost_flow(G), soln) assert_equal(nx.cost_of_flow(G, H), 150) def test_digraph2(self): # Example from ticket #430 from mfrasca. Original source: # http://www.cs.princeton.edu/courses/archive/spr03/cs226/lectures/mincost.4up.pdf, slide 11. G = nx.DiGraph() G.add_edge('s', 1, capacity=12) G.add_edge('s', 2, capacity=6) G.add_edge('s', 3, capacity=14) G.add_edge(1, 2, capacity=11, weight=4) G.add_edge(2, 3, capacity=9, weight=6) G.add_edge(1, 4, capacity=5, weight=5) G.add_edge(1, 5, capacity=2, weight=12) G.add_edge(2, 5, capacity=4, weight=4) G.add_edge(2, 6, capacity=2, weight=6) G.add_edge(3, 6, capacity=31, weight=3) G.add_edge(4, 5, capacity=18, weight=4) G.add_edge(5, 6, capacity=9, weight=5) G.add_edge(4, 't', capacity=3) G.add_edge(5, 't', capacity=7) G.add_edge(6, 't', capacity=22) flow = nx.max_flow_min_cost(G, 's', 't') soln = {1: {2: 6, 4: 5, 5: 1}, 2: {3: 6, 5: 4, 6: 2}, 3: {6: 20}, 4: {5: 2, 't': 3}, 5: {6: 0, 't': 7}, 6: {'t': 22}, 's': {1: 12, 2: 6, 3: 14}, 't': {}} assert_equal(flow, soln) def test_digraph3(self): """Combinatorial Optimization: Algorithms and Complexity, Papadimitriou Steiglitz at page 140 has an example, 7.1, but that admits multiple solutions, so I alter it a bit. 
From ticket #430 by mfrasca.""" G = nx.DiGraph() G.add_edge('s', 'a', {0: 2, 1: 4}) G.add_edge('s', 'b', {0: 2, 1: 1}) G.add_edge('a', 'b', {0: 5, 1: 2}) G.add_edge('a', 't', {0: 1, 1: 5}) G.add_edge('b', 'a', {0: 1, 1: 3}) G.add_edge('b', 't', {0: 3, 1: 2}) "PS.ex.7.1: testing main function" sol = nx.max_flow_min_cost(G, 's', 't', capacity=0, weight=1) flow = sum(v for v in sol['s'].values()) assert_equal(4, flow) assert_equal(23, nx.cost_of_flow(G, sol, weight=1)) assert_equal(sol['s'], {'a': 2, 'b': 2}) assert_equal(sol['a'], {'b': 1, 't': 1}) assert_equal(sol['b'], {'a': 0, 't': 3}) assert_equal(sol['t'], {}) def test_zero_capacity_edges(self): """Address issue raised in ticket #617 by arv.""" G = nx.DiGraph() G.add_edges_from([(1, 2, {'capacity': 1, 'weight': 1}), (1, 5, {'capacity': 1, 'weight': 1}), (2, 3, {'capacity': 0, 'weight': 1}), (2, 5, {'capacity': 1, 'weight': 1}), (5, 3, {'capacity': 2, 'weight': 1}), (5, 4, {'capacity': 0, 'weight': 1}), (3, 4, {'capacity': 2, 'weight': 1})]) G.node[1]['demand'] = -1 G.node[2]['demand'] = -1 G.node[4]['demand'] = 2 flowCost, H = nx.network_simplex(G) soln = {1: {2: 0, 5: 1}, 2: {3: 0, 5: 1}, 3: {4: 2}, 4: {}, 5: {3: 2, 4: 0}} assert_equal(flowCost, 6) assert_equal(nx.min_cost_flow_cost(G), 6) assert_equal(H, soln) assert_equal(nx.min_cost_flow(G), soln) assert_equal(nx.cost_of_flow(G, H), 6) def test_digon(self): """Check if digons are handled properly. Taken from ticket #618 by arv.""" nodes = [(1, {}), (2, {'demand': -4}), (3, {'demand': 4}), ] edges = [(1, 2, {'capacity': 3, 'weight': 600000}), (2, 1, {'capacity': 2, 'weight': 0}), (2, 3, {'capacity': 5, 'weight': 714285}), (3, 2, {'capacity': 2, 'weight': 0}), ] G = nx.DiGraph(edges) G.add_nodes_from(nodes) flowCost, H = nx.network_simplex(G) soln = {1: {2: 0}, 2: {1: 0, 3: 4}, 3: {2: 0}} assert_equal(flowCost, 2857140) assert_equal(nx.min_cost_flow_cost(G), 2857140) assert_equal(H, soln) assert_equal(nx.min_cost_flow(G), soln) assert_equal(nx.cost_of_flow(G, H), 2857140) def test_infinite_capacity_neg_digon(self): """An infinite capacity negative cost digon results in an unbounded instance.""" nodes = [(1, {}), (2, {'demand': -4}), (3, {'demand': 4}), ] edges = [(1, 2, {'weight': -600}), (2, 1, {'weight': 0}), (2, 3, {'capacity': 5, 'weight': 714285}), (3, 2, {'capacity': 2, 'weight': 0}), ] G = nx.DiGraph(edges) G.add_nodes_from(nodes) assert_raises(nx.NetworkXUnbounded, nx.network_simplex, G) def test_finite_capacity_neg_digon(self): """The digon should receive the maximum amount of flow it can handle. Taken from ticket #749 by @chuongdo.""" G = nx.DiGraph() G.add_edge('a', 'b', capacity=1, weight=-1) G.add_edge('b', 'a', capacity=1, weight=-1) min_cost = -2 assert_equal(nx.min_cost_flow_cost(G), min_cost) def test_multidigraph(self): """Raise an exception for multidigraph.""" G = nx.MultiDiGraph() G.add_weighted_edges_from([(1, 2, 1), (2, 3, 2)], weight='capacity') assert_raises(nx.NetworkXError, nx.network_simplex, G) networkx-1.8.1/networkx/algorithms/flow/tests/test_maxflow_large_graph.py0000664000175000017500000000264012177456333027063 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Max flow algorithm test suite on large graphs. Run with nose: nosetests -v test_max_flow.py """ __author__ = """Loïc Séguin-C. """ # Copyright (C) 2010 Loïc Séguin-C. # All rights reserved. # BSD license. 
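# A minimal illustrative use of the min-cost-flow interface exercised in
# test_mincost.py above (a sketch with an assumed two-node graph, not an
# original test case):
#
#     >>> import networkx as nx
#     >>> G = nx.DiGraph()
#     >>> G.add_node('a', demand=-1)
#     >>> G.add_node('b', demand=1)
#     >>> G.add_edge('a', 'b', weight=3, capacity=2)
#     >>> nx.min_cost_flow_cost(G)
#     3
#     >>> nx.min_cost_flow(G)['a']
#     {'b': 1}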
import networkx as nx from nose.tools import * def gen_pyramid(N): # This graph admits a flow of value 1 for which every arc is at # capacity (except the arcs incident to the sink which have # infinite capacity). G = nx.DiGraph() for i in range(N - 1): cap = 1. / (i + 2) for j in range(i + 1): G.add_edge((i, j), (i + 1, j), capacity = cap) cap = 1. / (i + 1) - cap G.add_edge((i, j), (i + 1, j + 1), capacity = cap) cap = 1. / (i + 2) - cap for j in range(N): G.add_edge((N - 1, j), 't') return G class TestMaxflowLargeGraph: def test_complete_graph(self): N = 50 G = nx.complete_graph(N) for (u, v) in G.edges(): G[u][v]['capacity'] = 5 assert_equal(nx.ford_fulkerson(G, 1, 2)[0], 5 * (N - 1)) def test_pyramid(self): N = 10 # N = 100 # this gives a graph with 5051 nodes G = gen_pyramid(N) assert_almost_equal(nx.ford_fulkerson(G, (0, 0), 't')[0], 1.) networkx-1.8.1/networkx/algorithms/flow/maxflow.py0000664000175000017500000003765512177456333022345 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Maximum flow (and minimum cut) algorithms on capacitated graphs. """ __author__ = """Loïc Séguin-C. """ # Copyright (C) 2010 Loïc Séguin-C. # All rights reserved. # BSD license. import networkx as nx __all__ = ['ford_fulkerson', 'ford_fulkerson_flow', 'ford_fulkerson_flow_and_auxiliary', 'max_flow', 'min_cut'] def ford_fulkerson_flow_and_auxiliary(G, s, t, capacity='capacity'): """Find a maximum single-commodity flow using the Ford-Fulkerson algorithm. This function returns both the value of the maximum flow and the auxiliary network resulting after finding the maximum flow, which is also named residual network in the literature. The auxiliary network has edges with capacity equal to the capacity of the edge in the original network minus the flow that went throught that edge. Notice that it can happen that a flow from v to u is allowed in the auxiliary network, though disallowed in the original network. A dictionary with infinite capacity edges can be found as an attribute of the auxiliary network. Parameters ---------- G : NetworkX graph Edges of the graph are expected to have an attribute called 'capacity'. If this attribute is not present, the edge is considered to have infinite capacity. s : node Source node for the flow. t : node Sink node for the flow. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. Returns ------- flow_value : integer, float Value of the maximum flow, i.e., net outflow from the source. auxiliary : DiGraph Residual/auxiliary network after finding the maximum flow. A dictionary with infinite capacity edges can be found as an attribute of this network: auxiliary.graph['inf_capacity_flows'] Raises ------ NetworkXError The algorithm does not support MultiGraph and MultiDiGraph. If the input graph is an instance of one of these two classes, a NetworkXError is raised. NetworkXUnbounded If the graph has a path of infinite capacity, the value of a feasible flow on the graph is unbounded above and the function raises a NetworkXUnbounded. Notes ----- This algorithm uses Edmonds-Karp-Dinitz path selection rule which guarantees a running time of `O(nm^2)` for `n` nodes and `m` edges. 
Examples -------- >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_edge('x','a', capacity=3.0) >>> G.add_edge('x','b', capacity=1.0) >>> G.add_edge('a','c', capacity=3.0) >>> G.add_edge('b','c', capacity=5.0) >>> G.add_edge('b','d', capacity=4.0) >>> G.add_edge('d','e', capacity=2.0) >>> G.add_edge('c','y', capacity=2.0) >>> G.add_edge('e','y', capacity=3.0) >>> flow, auxiliary = nx.ford_fulkerson_flow_and_auxiliary(G, 'x', 'y') >>> flow 3.0 >>> # A dictionary with infinite capacity flows can be found as an >>> # attribute of the auxiliary network >>> inf_capacity_flows = auxiliary.graph['inf_capacity_flows'] """ if G.is_multigraph(): raise nx.NetworkXError( 'MultiGraph and MultiDiGraph not supported (yet).') if s not in G: raise nx.NetworkXError('node %s not in graph' % str(s)) if t not in G: raise nx.NetworkXError('node %s not in graph' % str(t)) auxiliary = _create_auxiliary_digraph(G, capacity=capacity) inf_capacity_flows = auxiliary.graph['inf_capacity_flows'] flow_value = 0 # Initial feasible flow. # As long as there is an (s, t)-path in the auxiliary digraph, find # the shortest (with respect to the number of arcs) such path and # augment the flow on this path. while True: try: path_nodes = nx.bidirectional_shortest_path(auxiliary, s, t) except nx.NetworkXNoPath: break # Get the list of edges in the shortest path. path_edges = list(zip(path_nodes[:-1], path_nodes[1:])) # Find the minimum capacity of an edge in the path. try: path_capacity = min([auxiliary[u][v][capacity] for u, v in path_edges if capacity in auxiliary[u][v]]) except ValueError: # path of infinite capacity implies no max flow raise nx.NetworkXUnbounded( "Infinite capacity path, flow unbounded above.") flow_value += path_capacity # Augment the flow along the path. for u, v in path_edges: edge_attr = auxiliary[u][v] if capacity in edge_attr: edge_attr[capacity] -= path_capacity if edge_attr[capacity] == 0: auxiliary.remove_edge(u, v) else: inf_capacity_flows[(u, v)] += path_capacity if auxiliary.has_edge(v, u): if capacity in auxiliary[v][u]: auxiliary[v][u][capacity] += path_capacity else: auxiliary.add_edge(v, u, {capacity: path_capacity}) auxiliary.graph['inf_capacity_flows'] = inf_capacity_flows return flow_value, auxiliary def _create_auxiliary_digraph(G, capacity='capacity'): """Initialize an auxiliary digraph and dict of infinite capacity edges for a given graph G. Ignore edges with capacity <= 0. """ auxiliary = nx.DiGraph() auxiliary.add_nodes_from(G) inf_capacity_flows = {} if nx.is_directed(G): for edge in G.edges(data = True): if capacity in edge[2]: if edge[2][capacity] > 0: auxiliary.add_edge(*edge) else: auxiliary.add_edge(*edge) inf_capacity_flows[(edge[0], edge[1])] = 0 else: for edge in G.edges(data = True): if capacity in edge[2]: if edge[2][capacity] > 0: auxiliary.add_edge(*edge) auxiliary.add_edge(edge[1], edge[0], edge[2]) else: auxiliary.add_edge(*edge) auxiliary.add_edge(edge[1], edge[0], edge[2]) inf_capacity_flows[(edge[0], edge[1])] = 0 inf_capacity_flows[(edge[1], edge[0])] = 0 auxiliary.graph['inf_capacity_flows'] = inf_capacity_flows return auxiliary def _create_flow_dict(G, H, capacity='capacity'): """Creates the flow dict of dicts on G corresponding to the auxiliary digraph H and infinite capacity edges flows inf_capacity_flows. 
""" inf_capacity_flows = H.graph['inf_capacity_flows'] flow = dict([(u, {}) for u in G]) if G.is_directed(): for u, v in G.edges_iter(): if H.has_edge(u, v): if capacity in G[u][v]: flow[u][v] = max(0, G[u][v][capacity] - H[u][v][capacity]) elif G.has_edge(v, u) and not capacity in G[v][u]: flow[u][v] = max(0, inf_capacity_flows[(u, v)] - inf_capacity_flows[(v, u)]) else: flow[u][v] = max(0, H[v].get(u, {}).get(capacity, 0) - G[v].get(u, {}).get(capacity, 0)) else: flow[u][v] = G[u][v][capacity] else: # undirected for u, v in G.edges_iter(): if H.has_edge(u, v): if capacity in G[u][v]: flow[u][v] = abs(G[u][v][capacity] - H[u][v][capacity]) else: flow[u][v] = abs(inf_capacity_flows[(u, v)] - inf_capacity_flows[(v, u)]) else: flow[u][v] = G[u][v][capacity] flow[v][u] = flow[u][v] return flow def ford_fulkerson(G, s, t, capacity='capacity'): """Find a maximum single-commodity flow using the Ford-Fulkerson algorithm. This algorithm uses Edmonds-Karp-Dinitz path selection rule which guarantees a running time of `O(nm^2)` for `n` nodes and `m` edges. Parameters ---------- G : NetworkX graph Edges of the graph are expected to have an attribute called 'capacity'. If this attribute is not present, the edge is considered to have infinite capacity. s : node Source node for the flow. t : node Sink node for the flow. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. Returns ------- flow_value : integer, float Value of the maximum flow, i.e., net outflow from the source. flow_dict : dictionary Dictionary of dictionaries keyed by nodes such that flow_dict[u][v] is the flow edge (u, v). Raises ------ NetworkXError The algorithm does not support MultiGraph and MultiDiGraph. If the input graph is an instance of one of these two classes, a NetworkXError is raised. NetworkXUnbounded If the graph has a path of infinite capacity, the value of a feasible flow on the graph is unbounded above and the function raises a NetworkXUnbounded. Examples -------- >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_edge('x','a', capacity=3.0) >>> G.add_edge('x','b', capacity=1.0) >>> G.add_edge('a','c', capacity=3.0) >>> G.add_edge('b','c', capacity=5.0) >>> G.add_edge('b','d', capacity=4.0) >>> G.add_edge('d','e', capacity=2.0) >>> G.add_edge('c','y', capacity=2.0) >>> G.add_edge('e','y', capacity=3.0) >>> flow, F = nx.ford_fulkerson(G, 'x', 'y') >>> flow 3.0 """ flow_value, auxiliary = ford_fulkerson_flow_and_auxiliary(G, s, t, capacity=capacity) flow_dict = _create_flow_dict(G, auxiliary, capacity=capacity) return flow_value, flow_dict def ford_fulkerson_flow(G, s, t, capacity='capacity'): """Return a maximum flow for a single-commodity flow problem. Parameters ---------- G : NetworkX graph Edges of the graph are expected to have an attribute called 'capacity'. If this attribute is not present, the edge is considered to have infinite capacity. s : node Source node for the flow. t : node Sink node for the flow. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. Returns ------- flow_dict : dictionary Dictionary of dictionaries keyed by nodes such that flow_dict[u][v] is the flow edge (u, v). 
Raises ------ NetworkXError The algorithm does not support MultiGraph and MultiDiGraph. If the input graph is an instance of one of these two classes, a NetworkXError is raised. NetworkXUnbounded If the graph has a path of infinite capacity, the value of a feasible flow on the graph is unbounded above and the function raises a NetworkXUnbounded. Examples -------- >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_edge('x','a', capacity=3.0) >>> G.add_edge('x','b', capacity=1.0) >>> G.add_edge('a','c', capacity=3.0) >>> G.add_edge('b','c', capacity=5.0) >>> G.add_edge('b','d', capacity=4.0) >>> G.add_edge('d','e', capacity=2.0) >>> G.add_edge('c','y', capacity=2.0) >>> G.add_edge('e','y', capacity=3.0) >>> F = nx.ford_fulkerson_flow(G, 'x', 'y') >>> for u, v in sorted(G.edges_iter()): ... print('(%s, %s) %.2f' % (u, v, F[u][v])) ... (a, c) 2.00 (b, c) 0.00 (b, d) 1.00 (c, y) 2.00 (d, e) 1.00 (e, y) 1.00 (x, a) 2.00 (x, b) 1.00 """ flow_value, auxiliary = ford_fulkerson_flow_and_auxiliary(G, s, t, capacity=capacity) return _create_flow_dict(G, auxiliary, capacity=capacity) def max_flow(G, s, t, capacity='capacity'): """Find the value of a maximum single-commodity flow. Parameters ---------- G : NetworkX graph Edges of the graph are expected to have an attribute called 'capacity'. If this attribute is not present, the edge is considered to have infinite capacity. s : node Source node for the flow. t : node Sink node for the flow. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. Returns ------- flow_value : integer, float Value of the maximum flow, i.e., net outflow from the source. Raises ------ NetworkXError The algorithm does not support MultiGraph and MultiDiGraph. If the input graph is an instance of one of these two classes, a NetworkXError is raised. NetworkXUnbounded If the graph has a path of infinite capacity, the value of a feasible flow on the graph is unbounded above and the function raises a NetworkXUnbounded. Examples -------- >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_edge('x','a', capacity=3.0) >>> G.add_edge('x','b', capacity=1.0) >>> G.add_edge('a','c', capacity=3.0) >>> G.add_edge('b','c', capacity=5.0) >>> G.add_edge('b','d', capacity=4.0) >>> G.add_edge('d','e', capacity=2.0) >>> G.add_edge('c','y', capacity=2.0) >>> G.add_edge('e','y', capacity=3.0) >>> flow = nx.max_flow(G, 'x', 'y') >>> flow 3.0 """ return ford_fulkerson_flow_and_auxiliary(G, s, t, capacity=capacity)[0] def min_cut(G, s, t, capacity='capacity'): """Compute the value of a minimum (s, t)-cut. Use the max-flow min-cut theorem, i.e., the capacity of a minimum capacity cut is equal to the flow value of a maximum flow. Parameters ---------- G : NetworkX graph Edges of the graph are expected to have an attribute called 'capacity'. If this attribute is not present, the edge is considered to have infinite capacity. s : node Source node for the flow. t : node Sink node for the flow. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. Returns ------- cutValue : integer, float Value of the minimum cut. 
Raises ------ NetworkXUnbounded If the graph has a path of infinite capacity, all cuts have infinite capacity and the function raises a NetworkXError. Examples -------- >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_edge('x','a', capacity = 3.0) >>> G.add_edge('x','b', capacity = 1.0) >>> G.add_edge('a','c', capacity = 3.0) >>> G.add_edge('b','c', capacity = 5.0) >>> G.add_edge('b','d', capacity = 4.0) >>> G.add_edge('d','e', capacity = 2.0) >>> G.add_edge('c','y', capacity = 2.0) >>> G.add_edge('e','y', capacity = 3.0) >>> nx.min_cut(G, 'x', 'y') 3.0 """ try: return ford_fulkerson_flow_and_auxiliary(G, s, t, capacity=capacity)[0] except nx.NetworkXUnbounded: raise nx.NetworkXUnbounded( "Infinite capacity path, no minimum cut.") networkx-1.8.1/networkx/algorithms/flow/mincost.py0000664000175000017500000007301412177456333022331 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Minimum cost flow algorithms on directed connected graphs. """ __author__ = """Loïc Séguin-C. """ # Copyright (C) 2010 Loïc Séguin-C. # All rights reserved. # BSD license. __all__ = ['network_simplex', 'min_cost_flow_cost', 'min_cost_flow', 'cost_of_flow', 'max_flow_min_cost'] import networkx as nx from networkx.utils import generate_unique_node def _initial_tree_solution(G, demand = 'demand', capacity = 'capacity', weight = 'weight'): """Find a initial tree solution rooted at r. The initial tree solution is obtained by considering edges (r, v) for all nodes v with non-negative demand and (v, r) for all nodes with negative demand. If these edges do not exist, we add them to the graph and call them artificial edges. """ H = nx.DiGraph((edge for edge in G.edges(data=True) if edge[2].get(capacity, 1) > 0)) demand_nodes = (node for node in G.nodes_iter(data=True) if node[1].get(demand, 0) != 0) H.add_nodes_from(demand_nodes) r = H.nodes()[0] T = nx.DiGraph() y = {r: 0} artificialEdges = [] flowCost = 0 n = H.number_of_nodes() try: maxWeight = max(abs(d[weight]) for u, v, d in H.edges(data = True) if weight in d) except ValueError: maxWeight = 0 hugeWeight = 1 + n * maxWeight for v, d in H.nodes(data = True)[1:]: vDemand = d.get(demand, 0) if vDemand >= 0: if not (r, v) in H.edges(): H.add_edge(r, v, {weight: hugeWeight, 'flow': vDemand}) artificialEdges.append((r, v)) y[v] = H[r][v].get(weight, 0) T.add_edge(r, v) flowCost += vDemand * H[r][v].get(weight, 0) else: # (r, v) in H.edges() if (not capacity in H[r][v] or vDemand <= H[r][v][capacity]): H[r][v]['flow'] = vDemand y[v] = H[r][v].get(weight, 0) T.add_edge(r, v) flowCost += vDemand * H[r][v].get(weight, 0) else: # existing edge does not have enough capacity newLabel = generate_unique_node() H.add_edge(r, newLabel, {weight: hugeWeight, 'flow': vDemand}) H.add_edge(newLabel, v, {weight: hugeWeight, 'flow': vDemand}) artificialEdges.append((r, newLabel)) artificialEdges.append((newLabel, v)) y[v] = 2 * hugeWeight y[newLabel] = hugeWeight T.add_edge(r, newLabel) T.add_edge(newLabel, v) flowCost += 2 * vDemand * hugeWeight else: # vDemand < 0 if not (v, r) in H.edges(): H.add_edge(v, r, {weight: hugeWeight, 'flow': -vDemand}) artificialEdges.append((v, r)) y[v] = -H[v][r].get(weight, 0) T.add_edge(v, r) flowCost += -vDemand * H[v][r].get(weight, 0) else: if (not capacity in H[v][r] or -vDemand <= H[v][r][capacity]): H[v][r]['flow'] = -vDemand y[v] = -H[v][r].get(weight, 0) T.add_edge(v, r) flowCost += -vDemand * H[v][r].get(weight, 0) else: # existing edge does not have enough capacity newLabel = generate_unique_node() H.add_edge(v, newLabel, {weight: 
hugeWeight, 'flow': -vDemand}) H.add_edge(newLabel, r, {weight: hugeWeight, 'flow': -vDemand}) artificialEdges.append((v, newLabel)) artificialEdges.append((newLabel, r)) y[v] = -2 * hugeWeight y[newLabel] = -hugeWeight T.add_edge(v, newLabel) T.add_edge(newLabel, r) flowCost += 2 * -vDemand * hugeWeight return H, T, y, artificialEdges, flowCost, r def _find_entering_edge(H, c, capacity = 'capacity'): """Find an edge which creates a negative cost cycle in the actual tree solution. The reduced cost of every edge gives the value of the cycle obtained by adding that edge to the tree solution. If that value is negative, we will augment the flow in the direction indicated by the edge. Otherwise, we will augment the flow in the reverse direction. If no edge is found, return and empty tuple. This will cause the main loop of the algorithm to terminate. """ newEdge = () for u, v, d in H.edges_iter(data = True): if d.get('flow', 0) == 0: if c[(u, v)] < 0: newEdge = (u, v) break else: if capacity in d: if (d.get('flow', 0) == d[capacity] and c[(u, v)] > 0): newEdge = (u, v) break return newEdge def _find_leaving_edge(H, T, cycle, newEdge, capacity = 'capacity', reverse=False): """Find an edge that will leave the basis and the value by which we can increase or decrease the flow on that edge. The leaving arc rule is used to prevent cycling. If cycle has no reverse edge and no forward edge of finite capacity, it means that cycle is a negative cost infinite capacity cycle. This implies that the cost of a flow satisfying all demands is unbounded below. An exception is raised in this case. """ eps = False leavingEdge = () # If cycle is a digon. if len(cycle) == 3: u, v = newEdge if capacity not in H[u][v] and capacity not in H[v][u]: raise nx.NetworkXUnbounded( "Negative cost cycle of infinite capacity found. " + "Min cost flow unbounded below.") if reverse: if H[u][v].get('flow', 0) > H[v][u].get('flow', 0): return (v, u), H[v][u].get('flow', 0) else: return (u, v), H[u][v].get('flow', 0) else: uv_residual = H[u][v].get(capacity, 0) - H[u][v].get('flow', 0) vu_residual = H[v][u].get(capacity, 0) - H[v][u].get('flow', 0) if (uv_residual > vu_residual): return (v, u), vu_residual else: return (u, v), uv_residual # Find the forward edge with the minimum value for capacity - 'flow' # and the reverse edge with the minimum value for 'flow'. for index, u in enumerate(cycle[:-1]): edgeCapacity = False edge = () v = cycle[index + 1] if (u, v) in T.edges() + [newEdge]: #forward edge if capacity in H[u][v]: # edge (u, v) has finite capacity edgeCapacity = H[u][v][capacity] - H[u][v].get('flow', 0) edge = (u, v) else: #reverse edge edgeCapacity = H[v][u].get('flow', 0) edge = (v, u) # Determine if edge might be the leaving edge. if edge: if leavingEdge: if edgeCapacity < eps: eps = edgeCapacity leavingEdge = edge else: eps = edgeCapacity leavingEdge = edge if not leavingEdge: raise nx.NetworkXUnbounded( "Negative cost cycle of infinite capacity found. " + "Min cost flow unbounded below.") return leavingEdge, eps def _create_flow_dict(G, H): """Creates the flow dict of dicts of graph G with auxiliary graph H.""" flowDict = dict([(u, {}) for u in G]) for u in G.nodes_iter(): for v in G.neighbors(u): if H.has_edge(u, v): flowDict[u][v] = H[u][v].get('flow', 0) else: flowDict[u][v] = 0 return flowDict def network_simplex(G, demand = 'demand', capacity = 'capacity', weight = 'weight'): """Find a minimum cost flow satisfying all demands in digraph G. 
This is a primal network simplex algorithm that uses the leaving arc rule to prevent cycling. G is a digraph with edge costs and capacities and in which nodes have demand, i.e., they want to send or receive some amount of flow. A negative demand means that the node wants to send flow, a positive demand means that the node want to receive flow. A flow on the digraph G satisfies all demand if the net flow into each node is equal to the demand of that node. Parameters ---------- G : NetworkX graph DiGraph on which a minimum cost flow satisfying all demands is to be found. demand: string Nodes of the graph G are expected to have an attribute demand that indicates how much flow a node wants to send (negative demand) or receive (positive demand). Note that the sum of the demands should be 0 otherwise the problem in not feasible. If this attribute is not present, a node is considered to have 0 demand. Default value: 'demand'. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. weight: string Edges of the graph G are expected to have an attribute weight that indicates the cost incurred by sending one unit of flow on that edge. If not present, the weight is considered to be 0. Default value: 'weight'. Returns ------- flowCost: integer, float Cost of a minimum cost flow satisfying all demands. flowDict: dictionary Dictionary of dictionaries keyed by nodes such that flowDict[u][v] is the flow edge (u, v). Raises ------ NetworkXError This exception is raised if the input graph is not directed, not connected or is a multigraph. NetworkXUnfeasible This exception is raised in the following situations: * The sum of the demands is not zero. Then, there is no flow satisfying all demands. * There is no flow satisfying all demand. NetworkXUnbounded This exception is raised if the digraph G has a cycle of negative cost and infinite capacity. Then, the cost of a flow satisfying all demands is unbounded below. Notes ----- This algorithm is not guaranteed to work if edge weights are floating point numbers (overflows and roundoff errors can cause problems). See also -------- cost_of_flow, max_flow_min_cost, min_cost_flow, min_cost_flow_cost Examples -------- A simple example of a min cost flow problem. >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_node('a', demand = -5) >>> G.add_node('d', demand = 5) >>> G.add_edge('a', 'b', weight = 3, capacity = 4) >>> G.add_edge('a', 'c', weight = 6, capacity = 10) >>> G.add_edge('b', 'd', weight = 1, capacity = 9) >>> G.add_edge('c', 'd', weight = 2, capacity = 5) >>> flowCost, flowDict = nx.network_simplex(G) >>> flowCost 24 >>> flowDict # doctest: +SKIP {'a': {'c': 1, 'b': 4}, 'c': {'d': 1}, 'b': {'d': 4}, 'd': {}} The mincost flow algorithm can also be used to solve shortest path problems. To find the shortest path between two nodes u and v, give all edges an infinite capacity, give node u a demand of -1 and node v a demand a 1. Then run the network simplex. The value of a min cost flow will be the distance between u and v and edges carrying positive flow will indicate the path. >>> G=nx.DiGraph() >>> G.add_weighted_edges_from([('s','u',10), ('s','x',5), ... ('u','v',1), ('u','x',2), ... ('v','y',1), ('x','u',3), ... ('x','v',5), ('x','y',2), ... 
('y','s',7), ('y','v',6)]) >>> G.add_node('s', demand = -1) >>> G.add_node('v', demand = 1) >>> flowCost, flowDict = nx.network_simplex(G) >>> flowCost == nx.shortest_path_length(G, 's', 'v', weight = 'weight') True >>> sorted([(u, v) for u in flowDict for v in flowDict[u] if flowDict[u][v] > 0]) [('s', 'x'), ('u', 'v'), ('x', 'u')] >>> nx.shortest_path(G, 's', 'v', weight = 'weight') ['s', 'x', 'u', 'v'] It is possible to change the name of the attributes used for the algorithm. >>> G = nx.DiGraph() >>> G.add_node('p', spam = -4) >>> G.add_node('q', spam = 2) >>> G.add_node('a', spam = -2) >>> G.add_node('d', spam = -1) >>> G.add_node('t', spam = 2) >>> G.add_node('w', spam = 3) >>> G.add_edge('p', 'q', cost = 7, vacancies = 5) >>> G.add_edge('p', 'a', cost = 1, vacancies = 4) >>> G.add_edge('q', 'd', cost = 2, vacancies = 3) >>> G.add_edge('t', 'q', cost = 1, vacancies = 2) >>> G.add_edge('a', 't', cost = 2, vacancies = 4) >>> G.add_edge('d', 'w', cost = 3, vacancies = 4) >>> G.add_edge('t', 'w', cost = 4, vacancies = 1) >>> flowCost, flowDict = nx.network_simplex(G, demand = 'spam', ... capacity = 'vacancies', ... weight = 'cost') >>> flowCost 37 >>> flowDict # doctest: +SKIP {'a': {'t': 4}, 'd': {'w': 2}, 'q': {'d': 1}, 'p': {'q': 2, 'a': 2}, 't': {'q': 1, 'w': 1}, 'w': {}} References ---------- W. J. Cook, W. H. Cunningham, W. R. Pulleyblank and A. Schrijver. Combinatorial Optimization. Wiley-Interscience, 1998. """ if not G.is_directed(): raise nx.NetworkXError("Undirected graph not supported.") if not nx.is_connected(G.to_undirected()): raise nx.NetworkXError("Not connected graph not supported.") if G.is_multigraph(): raise nx.NetworkXError("MultiDiGraph not supported.") if sum(d[demand] for v, d in G.nodes(data = True) if demand in d) != 0: raise nx.NetworkXUnfeasible("Sum of the demands should be 0.") # Fix an arbitrarily chosen root node and find an initial tree solution. H, T, y, artificialEdges, flowCost, r = \ _initial_tree_solution(G, demand = demand, capacity = capacity, weight = weight) # Initialize the reduced costs. c = {} for u, v, d in H.edges_iter(data = True): c[(u, v)] = d.get(weight, 0) + y[u] - y[v] # Print stuff for debugging. # print('-' * 78) # nbIter = 0 # print('Iteration %d' % nbIter) # nbIter += 1 # print('Tree solution: %s' % T.edges()) # print(' Edge %11s%10s' % ('Flow', 'Red Cost')) # for u, v, d in H.edges(data = True): # flag = '' # if (u, v) in artificialEdges: # flag = '*' # print('(%s, %s)%1s%10d%10d' % (u, v, flag, d.get('flow', 0), # c[(u, v)])) # print('Distances: %s' % y) # Main loop. while True: newEdge = _find_entering_edge(H, c, capacity = capacity) if not newEdge: break # Optimal basis found. Main loop is over. cycleCost = abs(c[newEdge]) # Find the cycle created by adding newEdge to T. path1 = nx.shortest_path(T.to_undirected(), r, newEdge[0]) path2 = nx.shortest_path(T.to_undirected(), r, newEdge[1]) join = r for index, node in enumerate(path1[1:]): if index + 1 < len(path2) and node == path2[index + 1]: join = node else: break path1 = path1[path1.index(join):] path2 = path2[path2.index(join):] cycle = [] if H[newEdge[0]][newEdge[1]].get('flow', 0) == 0: reverse = False path2.reverse() cycle = path1 + path2 else: # newEdge is at capacity reverse = True path1.reverse() cycle = path2 + path1 # Find the leaving edge. Will stop here if cycle is an infinite # capacity negative cost cycle. leavingEdge, eps = _find_leaving_edge(H, T, cycle, newEdge, capacity=capacity, reverse=reverse) # Actual augmentation happens here. If eps = 0, don't bother. 
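# Concretely, the block below pushes `eps` units of flow around `cycle`:
# forward edges (those in the tree T or equal to newEdge) gain eps, reverse
# edges lose eps, and the objective improves by cycleCost * eps. The
# len(cycle) == 3 branch is the digon special case, where the two
# antiparallel edges are updated together.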
if eps: flowCost -= cycleCost * eps if len(cycle) == 3: if reverse: eps = -eps u, v = newEdge H[u][v]['flow'] = H[u][v].get('flow', 0) + eps H[v][u]['flow'] = H[v][u].get('flow', 0) + eps else: for index, u in enumerate(cycle[:-1]): v = cycle[index + 1] if (u, v) in T.edges() + [newEdge]: H[u][v]['flow'] = H[u][v].get('flow', 0) + eps else: # (v, u) in T.edges(): H[v][u]['flow'] -= eps # Update tree solution. T.add_edge(*newEdge) T.remove_edge(*leavingEdge) # Update distances and reduced costs. if newEdge != leavingEdge: forest = nx.DiGraph(T) forest.remove_edge(*newEdge) R, notR = nx.connected_component_subgraphs(forest.to_undirected()) if r in notR.nodes(): # make sure r is in R R, notR = notR, R if newEdge[0] in R.nodes(): for v in notR.nodes(): y[v] += c[newEdge] else: for v in notR.nodes(): y[v] -= c[newEdge] for u, v in H.edges(): if u in notR.nodes() or v in notR.nodes(): c[(u, v)] = H[u][v].get(weight, 0) + y[u] - y[v] # Print stuff for debugging. # print('-' * 78) # print('Iteration %d' % nbIter) # nbIter += 1 # print('Tree solution: %s' % T.edges()) # print('New edge: (%s, %s)' % (newEdge[0], newEdge[1])) # print('Leaving edge: (%s, %s)' % (leavingEdge[0], leavingEdge[1])) # print('Cycle: %s' % cycle) # print('eps: %d' % eps) # print(' Edge %11s%10s' % ('Flow', 'Red Cost')) # for u, v, d in H.edges(data = True): # flag = '' # if (u, v) in artificialEdges: # flag = '*' # print('(%s, %s)%1s%10d%10d' % (u, v, flag, d.get('flow', 0), # c[(u, v)])) # print('Distances: %s' % y) # If an artificial edge has positive flow, the initial problem was # not feasible. for u, v in artificialEdges: if H[u][v]['flow'] != 0: raise nx.NetworkXUnfeasible("No flow satisfying all demands.") H.remove_edge(u, v) for u in H.nodes(): if not u in G: H.remove_node(u) flowDict = _create_flow_dict(G, H) return flowCost, flowDict def min_cost_flow_cost(G, demand = 'demand', capacity = 'capacity', weight = 'weight'): """Find the cost of a minimum cost flow satisfying all demands in digraph G. G is a digraph with edge costs and capacities and in which nodes have demand, i.e., they want to send or receive some amount of flow. A negative demand means that the node wants to send flow, a positive demand means that the node want to receive flow. A flow on the digraph G satisfies all demand if the net flow into each node is equal to the demand of that node. Parameters ---------- G : NetworkX graph DiGraph on which a minimum cost flow satisfying all demands is to be found. demand: string Nodes of the graph G are expected to have an attribute demand that indicates how much flow a node wants to send (negative demand) or receive (positive demand). Note that the sum of the demands should be 0 otherwise the problem in not feasible. If this attribute is not present, a node is considered to have 0 demand. Default value: 'demand'. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. weight: string Edges of the graph G are expected to have an attribute weight that indicates the cost incurred by sending one unit of flow on that edge. If not present, the weight is considered to be 0. Default value: 'weight'. Returns ------- flowCost: integer, float Cost of a minimum cost flow satisfying all demands. Raises ------ NetworkXError This exception is raised if the input graph is not directed or not connected. 
NetworkXUnfeasible This exception is raised in the following situations: * The sum of the demands is not zero. Then, there is no flow satisfying all demands. * There is no flow satisfying all demand. NetworkXUnbounded This exception is raised if the digraph G has a cycle of negative cost and infinite capacity. Then, the cost of a flow satisfying all demands is unbounded below. See also -------- cost_of_flow, max_flow_min_cost, min_cost_flow, network_simplex Examples -------- A simple example of a min cost flow problem. >>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_node('a', demand = -5) >>> G.add_node('d', demand = 5) >>> G.add_edge('a', 'b', weight = 3, capacity = 4) >>> G.add_edge('a', 'c', weight = 6, capacity = 10) >>> G.add_edge('b', 'd', weight = 1, capacity = 9) >>> G.add_edge('c', 'd', weight = 2, capacity = 5) >>> flowCost = nx.min_cost_flow_cost(G) >>> flowCost 24 """ return network_simplex(G, demand = demand, capacity = capacity, weight = weight)[0] def min_cost_flow(G, demand = 'demand', capacity = 'capacity', weight = 'weight'): """Return a minimum cost flow satisfying all demands in digraph G. G is a digraph with edge costs and capacities and in which nodes have demand, i.e., they want to send or receive some amount of flow. A negative demand means that the node wants to send flow, a positive demand means that the node want to receive flow. A flow on the digraph G satisfies all demand if the net flow into each node is equal to the demand of that node. Parameters ---------- G : NetworkX graph DiGraph on which a minimum cost flow satisfying all demands is to be found. demand: string Nodes of the graph G are expected to have an attribute demand that indicates how much flow a node wants to send (negative demand) or receive (positive demand). Note that the sum of the demands should be 0 otherwise the problem in not feasible. If this attribute is not present, a node is considered to have 0 demand. Default value: 'demand'. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. weight: string Edges of the graph G are expected to have an attribute weight that indicates the cost incurred by sending one unit of flow on that edge. If not present, the weight is considered to be 0. Default value: 'weight'. Returns ------- flowDict: dictionary Dictionary of dictionaries keyed by nodes such that flowDict[u][v] is the flow edge (u, v). Raises ------ NetworkXError This exception is raised if the input graph is not directed or not connected. NetworkXUnfeasible This exception is raised in the following situations: * The sum of the demands is not zero. Then, there is no flow satisfying all demands. * There is no flow satisfying all demand. NetworkXUnbounded This exception is raised if the digraph G has a cycle of negative cost and infinite capacity. Then, the cost of a flow satisfying all demands is unbounded below. See also -------- cost_of_flow, max_flow_min_cost, min_cost_flow_cost, network_simplex Examples -------- A simple example of a min cost flow problem. 
>>> import networkx as nx >>> G = nx.DiGraph() >>> G.add_node('a', demand = -5) >>> G.add_node('d', demand = 5) >>> G.add_edge('a', 'b', weight = 3, capacity = 4) >>> G.add_edge('a', 'c', weight = 6, capacity = 10) >>> G.add_edge('b', 'd', weight = 1, capacity = 9) >>> G.add_edge('c', 'd', weight = 2, capacity = 5) >>> flowDict = nx.min_cost_flow(G) """ return network_simplex(G, demand = demand, capacity = capacity, weight = weight)[1] def cost_of_flow(G, flowDict, weight = 'weight'): """Compute the cost of the flow given by flowDict on graph G. Note that this function does not check for the validity of the flow flowDict. This function will fail if the graph G and the flow don't have the same edge set. Parameters ---------- G : NetworkX graph DiGraph on which a minimum cost flow satisfying all demands is to be found. weight: string Edges of the graph G are expected to have an attribute weight that indicates the cost incurred by sending one unit of flow on that edge. If not present, the weight is considered to be 0. Default value: 'weight'. flowDict: dictionary Dictionary of dictionaries keyed by nodes such that flowDict[u][v] is the flow edge (u, v). Returns ------- cost: Integer, float The total cost of the flow. This is given by the sum over all edges of the product of the edge's flow and the edge's weight. See also -------- max_flow_min_cost, min_cost_flow, min_cost_flow_cost, network_simplex """ return sum((flowDict[u][v] * d.get(weight, 0) for u, v, d in G.edges_iter(data = True))) def max_flow_min_cost(G, s, t, capacity = 'capacity', weight = 'weight'): """Return a maximum (s, t)-flow of minimum cost. G is a digraph with edge costs and capacities. There is a source node s and a sink node t. This function finds a maximum flow from s to t whose total cost is minimized. Parameters ---------- G : NetworkX graph DiGraph on which a minimum cost flow satisfying all demands is to be found. s: node label Source of the flow. t: node label Destination of the flow. capacity: string Edges of the graph G are expected to have an attribute capacity that indicates how much flow the edge can support. If this attribute is not present, the edge is considered to have infinite capacity. Default value: 'capacity'. weight: string Edges of the graph G are expected to have an attribute weight that indicates the cost incurred by sending one unit of flow on that edge. If not present, the weight is considered to be 0. Default value: 'weight'. Returns ------- flowDict: dictionary Dictionary of dictionaries keyed by nodes such that flowDict[u][v] is the flow edge (u, v). Raises ------ NetworkXError This exception is raised if the input graph is not directed or not connected. NetworkXUnbounded This exception is raised if there is an infinite capacity path from s to t in G. In this case there is no maximum flow. This exception is also raised if the digraph G has a cycle of negative cost and infinite capacity. Then, the cost of a flow is unbounded below. See also -------- cost_of_flow, ford_fulkerson, min_cost_flow, min_cost_flow_cost, network_simplex Examples -------- >>> G = nx.DiGraph() >>> G.add_edges_from([(1, 2, {'capacity': 12, 'weight': 4}), ... (1, 3, {'capacity': 20, 'weight': 6}), ... (2, 3, {'capacity': 6, 'weight': -3}), ... (2, 6, {'capacity': 14, 'weight': 1}), ... (3, 4, {'weight': 9}), ... (3, 5, {'capacity': 10, 'weight': 5}), ... (4, 2, {'capacity': 19, 'weight': 13}), ... (4, 5, {'capacity': 4, 'weight': 0}), ... (5, 7, {'capacity': 28, 'weight': 2}), ... (6, 5, {'capacity': 11, 'weight': 1}), ... 
(6, 7, {'weight': 8}), ... (7, 4, {'capacity': 6, 'weight': 6})]) >>> mincostFlow = nx.max_flow_min_cost(G, 1, 7) >>> nx.cost_of_flow(G, mincostFlow) 373 >>> maxFlow = nx.ford_fulkerson_flow(G, 1, 7) >>> nx.cost_of_flow(G, maxFlow) 428 >>> mincostFlowValue = (sum((mincostFlow[u][7] for u in G.predecessors(7))) ... - sum((mincostFlow[7][v] for v in G.successors(7)))) >>> mincostFlowValue == nx.max_flow(G, 1, 7) True """ maxFlow = nx.max_flow(G, s, t, capacity = capacity) H = nx.DiGraph(G) H.add_node(s, demand = -maxFlow) H.add_node(t, demand = maxFlow) return min_cost_flow(H, capacity = capacity, weight = weight) networkx-1.8.1/networkx/algorithms/vitality.py0000664000175000017500000000467012177456333021555 0ustar aricaric00000000000000""" Vitality measures. """ # Copyright (C) 2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Renato Fabbri']) __all__ = ['closeness_vitality'] def weiner_index(G, weight=None): # compute sum of distances between all node pairs # (with optional weights) weiner=0.0 if weight is None: for n in G: path_length=nx.single_source_shortest_path_length(G,n) weiner+=sum(path_length.values()) else: for n in G: path_length=nx.single_source_dijkstra_path_length(G, n,weight=weight) weiner+=sum(path_length.values()) return weiner def closeness_vitality(G, weight=None): """Compute closeness vitality for nodes. Closeness vitality of a node is the change in the sum of distances between all node pairs when excluding that node. Parameters ---------- G : graph weight : None or string (optional) The name of the edge attribute used as weight. If None the edge weights are ignored. Returns ------- nodes : dictionary Dictionary with nodes as keys and closeness vitality as the value. Examples -------- >>> G=nx.cycle_graph(3) >>> nx.closeness_vitality(G) {0: 4.0, 1: 4.0, 2: 4.0} See Also -------- closeness_centrality() References ---------- .. [1] Ulrik Brandes, Sec. 3.6.2 in Network Analysis: Methodological Foundations, Springer, 2005. http://books.google.com/books?id=TTNhSm7HYrIC """ multigraph = G.is_multigraph() wig = weiner_index(G,weight) closeness_vitality = {} for n in G: # remove edges connected to node n and keep list of edges with data # could remove node n but it doesn't count anyway if multigraph: edges = G.edges(n,data=True,keys=True) if G.is_directed(): edges += G.in_edges(n,data=True,keys=True) else: edges = G.edges(n,data=True) if G.is_directed(): edges += G.in_edges(n,data=True) G.remove_edges_from(edges) closeness_vitality[n] = wig - weiner_index(G,weight) # add edges and data back to graph G.add_edges_from(edges) return closeness_vitality networkx-1.8.1/networkx/algorithms/assortativity/0000775000175000017500000000000012177457361022256 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/assortativity/mixing.py0000664000175000017500000001513212177456333024123 0ustar aricaric00000000000000#-*- coding: utf-8 -*- """ Mixing matrices for node attributes and degree. """ import networkx as nx from networkx.utils import dict_to_numpy_array from networkx.algorithms.assortativity.pairs import node_degree_xy, \ node_attribute_xy __author__ = ' '.join(['Aric Hagberg ']) __all__ = ['attribute_mixing_matrix', 'attribute_mixing_dict', 'degree_mixing_matrix', 'degree_mixing_dict', 'numeric_mixing_matrix', 'mixing_dict'] def attribute_mixing_dict(G,attribute,nodes=None,normalized=False): """Return dictionary representation of mixing matrix for attribute. 
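    The returned dictionary d is keyed by attribute values, so that d[x][y]
    counts (or, when normalized, gives the joint probability of) edges whose
    endpoints carry the attribute values x and y.  Conceptually this is just
    a tally over the pairs produced by node_attribute_xy; a rough sketch, for
    illustration only::

        from collections import defaultdict

        d = defaultdict(lambda: defaultdict(int))
        for x, y in nx.node_attribute_xy(G, attribute, nodes):
            d[x][y] += 1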
Parameters ---------- G : graph NetworkX graph object. attribute : string Node attribute key. nodes: list or iterable (optional) Unse nodes in container to build the dict. The default is all nodes. normalized : bool (default=False) Return counts if False or probabilities if True. Examples -------- >>> G=nx.Graph() >>> G.add_nodes_from([0,1],color='red') >>> G.add_nodes_from([2,3],color='blue') >>> G.add_edge(1,3) >>> d=nx.attribute_mixing_dict(G,'color') >>> print(d['red']['blue']) 1 >>> print(d['blue']['red']) # d symmetric for undirected graphs 1 Returns ------- d : dictionary Counts or joint probability of occurrence of attribute pairs. """ xy_iter=node_attribute_xy(G,attribute,nodes) return mixing_dict(xy_iter,normalized=normalized) def attribute_mixing_matrix(G,attribute,nodes=None,mapping=None, normalized=True): """Return mixing matrix for attribute. Parameters ---------- G : graph NetworkX graph object. attribute : string Node attribute key. nodes: list or iterable (optional) Use only nodes in container to build the matrix. The default is all nodes. mapping : dictionary, optional Mapping from node attribute to integer index in matrix. If not specified, an arbitrary ordering will be used. normalized : bool (default=False) Return counts if False or probabilities if True. Returns ------- m: numpy array Counts or joint probability of occurrence of attribute pairs. """ d=attribute_mixing_dict(G,attribute,nodes) a=dict_to_numpy_array(d,mapping=mapping) if normalized: a=a/a.sum() return a def degree_mixing_dict(G, x='out', y='in', weight=None, nodes=None, normalized=False): """Return dictionary representation of mixing matrix for degree. Parameters ---------- G : graph NetworkX graph object. x: string ('in','out') The degree type for source node (directed graphs only). y: string ('in','out') The degree type for target node (directed graphs only). weight: string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. normalized : bool (default=False) Return counts if False or probabilities if True. Returns ------- d: dictionary Counts or joint probability of occurrence of degree pairs. """ xy_iter=node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight) return mixing_dict(xy_iter,normalized=normalized) def degree_mixing_matrix(G, x='out', y='in', weight=None, nodes=None, normalized=True): """Return mixing matrix for attribute. Parameters ---------- G : graph NetworkX graph object. x: string ('in','out') The degree type for source node (directed graphs only). y: string ('in','out') The degree type for target node (directed graphs only). nodes: list or iterable (optional) Build the matrix using only nodes in container. The default is all nodes. weight: string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. normalized : bool (default=False) Return counts if False or probabilities if True. Returns ------- m: numpy array Counts, or joint probability, of occurrence of node degree. 
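    Notes
    -----
    Rows and columns of the returned matrix are indexed directly by degree,
    so m[i, j] refers to edges joining a node of degree i to a node of
    degree j; degrees that do not occur in G give all-zero rows and columns.
    For example, for a path graph on four nodes the only nonzero entries are
    at the (1, 2), (2, 1) and (2, 2) positions::

        a = nx.degree_mixing_matrix(nx.path_graph(4), normalized=False)
        # a[1, 2] == a[2, 1] == a[2, 2] == 2; every other entry is 0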
""" d=degree_mixing_dict(G, x=x, y=y, nodes=nodes, weight=weight) s=set(d.keys()) for k,v in d.items(): s.update(v.keys()) m=max(s) mapping=dict(zip(range(m+1),range(m+1))) a=dict_to_numpy_array(d,mapping=mapping) if normalized: a=a/a.sum() return a def numeric_mixing_matrix(G,attribute,nodes=None,normalized=True): """Return numeric mixing matrix for attribute. Parameters ---------- G : graph NetworkX graph object. attribute : string Node attribute key. nodes: list or iterable (optional) Build the matrix only with nodes in container. The default is all nodes. normalized : bool (default=False) Return counts if False or probabilities if True. Returns ------- m: numpy array Counts, or joint, probability of occurrence of node attribute pairs. """ d=attribute_mixing_dict(G,attribute,nodes) s=set(d.keys()) for k,v in d.items(): s.update(v.keys()) m=max(s) mapping=dict(zip(range(m+1),range(m+1))) a=dict_to_numpy_array(d,mapping=mapping) if normalized: a=a/a.sum() return a def mixing_dict(xy,normalized=False): """Return a dictionary representation of mixing matrix. Parameters ---------- xy : list or container of two-tuples Pairs of (x,y) items. attribute : string Node attribute key normalized : bool (default=False) Return counts if False or probabilities if True. Returns ------- d: dictionary Counts or Joint probability of occurrence of values in xy. """ d={} psum=0.0 for x,y in xy: if x not in d: d[x]={} if y not in d: d[y]={} v = d[x].get(y,0) d[x][y] = v+1 psum+=1 if normalized: for k,jdict in d.items(): for j in jdict: jdict[j]/=psum return d # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/algorithms/assortativity/connectivity.py0000664000175000017500000001017712177456333025352 0ustar aricaric00000000000000#-*- coding: utf-8 -*- # Copyright (C) 2011 by # Jordi Torrents # Aric Hagberg # All rights reserved. # BSD license. from collections import defaultdict import networkx as nx __author__ = """\n""".join(['Jordi Torrents ', 'Aric Hagberg (hagberg@lanl.gov)']) __all__ = ['average_degree_connectivity', 'k_nearest_neighbors'] def _avg_deg_conn(G, neighbors, source_degree, target_degree, nodes=None, weight=None): # "k nearest neighbors, or neighbor_connectivity dsum = defaultdict(float) dnorm = defaultdict(float) for n,k in source_degree(nodes).items(): nbrdeg = target_degree(neighbors(n)) if weight is None: s = float(sum(nbrdeg.values())) else: # weight nbr degree by weight of (n,nbr) edge if neighbors == G.neighbors: s = float(sum((G[n][nbr].get(weight,1)*d for nbr,d in nbrdeg.items()))) elif neighbors == G.successors: s = float(sum((G[n][nbr].get(weight,1)*d for nbr,d in nbrdeg.items()))) elif neighbors == G.predecessors: s = float(sum((G[nbr][n].get(weight,1)*d for nbr,d in nbrdeg.items()))) dnorm[k] += source_degree(n, weight=weight) dsum[k] += s # normalize dc = {} for k,avg in dsum.items(): dc[k]=avg norm = dnorm[k] if avg > 0 and norm > 0: dc[k]/=norm return dc def average_degree_connectivity(G, source="in+out", target="in+out", nodes=None, weight=None): r"""Compute the average degree connectivity of graph. The average degree connectivity is the average nearest neighbor degree of nodes with degree k. For weighted graphs, an analogous measure can be computed using the weighted average neighbors degree defined in [1]_, for a node `i`, as: .. 
math:: k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j where `s_i` is the weighted degree of node `i`, `w_{ij}` is the weight of the edge that links `i` and `j`, and `N(i)` are the neighbors of node `i`. Parameters ---------- G : NetworkX graph source : "in"|"out"|"in+out" (default:"in+out") Directed graphs only. Use "in"- or "out"-degree for source node. target : "in"|"out"|"in+out" (default:"in+out" Directed graphs only. Use "in"- or "out"-degree for target node. nodes: list or iterable (optional) Compute neighbor connectivity for these nodes. The default is all nodes. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. Returns ------- d: dict A dictionary keyed by degree k with the value of average connectivity. Examples -------- >>> G=nx.path_graph(4) >>> G.edge[1][2]['weight'] = 3 >>> nx.k_nearest_neighbors(G) {1: 2.0, 2: 1.5} >>> nx.k_nearest_neighbors(G, weight='weight') {1: 2.0, 2: 1.75} See also -------- neighbors_average_degree Notes ----- This algorithm is sometimes called "k nearest neighbors'. References ---------- .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani, "The architecture of complex weighted networks". PNAS 101 (11): 3747–3752 (2004). """ source_degree = G.degree target_degree = G.degree neighbors = G.neighbors if G.is_directed(): direction = {'out':G.out_degree, 'in':G.in_degree, 'in+out': G.degree} source_degree = direction[source] target_degree = direction[target] if source == 'in': neighbors=G.predecessors elif source == 'out': neighbors=G.successors return _avg_deg_conn(G, neighbors, source_degree, target_degree, nodes=nodes, weight=weight) k_nearest_neighbors=average_degree_connectivity networkx-1.8.1/networkx/algorithms/assortativity/neighbor_degree.py0000664000175000017500000001032512177456333025737 0ustar aricaric00000000000000#-*- coding: utf-8 -*- # Copyright (C) 2011 by # Jordi Torrents # Aric Hagberg # All rights reserved. # BSD license. import networkx as nx __author__ = """\n""".join(['Jordi Torrents ', 'Aric Hagberg (hagberg@lanl.gov)']) __all__ = ["average_neighbor_degree"] def _average_nbr_deg(G, source_degree, target_degree, nodes=None, weight=None): # average degree of neighbors avg = {} for n,deg in source_degree(nodes,weight=weight).items(): # normalize but not by zero degree if deg == 0: deg = 1 nbrdeg = target_degree(G[n]) if weight is None: avg[n] = sum(nbrdeg.values())/float(deg) else: avg[n] = sum((G[n][nbr].get(weight,1)*d for nbr,d in nbrdeg.items()))/float(deg) return avg def average_neighbor_degree(G, source='out', target='out', nodes=None, weight=None): r"""Returns the average degree of the neighborhood of each node. The average degree of a node `i` is .. math:: k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j where `N(i)` are the neighbors of node `i` and `k_j` is the degree of node `j` which belongs to `N(i)`. For weighted graphs, an analogous measure can be defined [1]_, .. math:: k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j where `s_i` is the weighted degree of node `i`, `w_{ij}` is the weight of the edge that links `i` and `j` and `N(i)` are the neighbors of node `i`. Parameters ---------- G : NetworkX graph source : string ("in"|"out") Directed graphs only. Use "in"- or "out"-degree for source node. target : string ("in"|"out") Directed graphs only. Use "in"- or "out"-degree for target node. nodes : list or iterable, optional Compute neighbor degree for specified nodes. 
The default is all nodes in the graph. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. Returns ------- d: dict A dictionary keyed by node with average neighbors degree value. Examples -------- >>> G=nx.path_graph(4) >>> G.edge[0][1]['weight'] = 5 >>> G.edge[2][3]['weight'] = 3 >>> nx.average_neighbor_degree(G) {0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0} >>> nx.average_neighbor_degree(G, weight='weight') {0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0} >>> G=nx.DiGraph() >>> G.add_path([0,1,2,3]) >>> nx.average_neighbor_degree(G, source='in', target='in') {0: 1.0, 1: 1.0, 2: 1.0, 3: 0.0} >>> nx.average_neighbor_degree(G, source='out', target='out') {0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0} Notes ----- For directed graphs you can also specify in-degree or out-degree by passing keyword arguments. See Also -------- average_degree_connectivity References ---------- .. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani, "The architecture of complex weighted networks". PNAS 101 (11): 3747–3752 (2004). """ source_degree = G.degree target_degree = G.degree if G.is_directed(): direction = {'out':G.out_degree, 'in':G.in_degree} source_degree = direction[source] target_degree = direction[target] return _average_nbr_deg(G, source_degree, target_degree, nodes=nodes, weight=weight) # obsolete # def average_neighbor_in_degree(G, nodes=None, weight=None): # if not G.is_directed(): # raise nx.NetworkXError("Not defined for undirected graphs.") # return _average_nbr_deg(G, G.in_degree, G.in_degree, nodes, weight) # average_neighbor_in_degree.__doc__=average_neighbor_degree.__doc__ # def average_neighbor_out_degree(G, nodes=None, weight=None): # if not G.is_directed(): # raise nx.NetworkXError("Not defined for undirected graphs.") # return _average_nbr_deg(G, G.out_degree, G.out_degree, nodes, weight) # average_neighbor_out_degree.__doc__=average_neighbor_degree.__doc__ networkx-1.8.1/networkx/algorithms/assortativity/__init__.py0000664000175000017500000000044612177456333024371 0ustar aricaric00000000000000from networkx.algorithms.assortativity.connectivity import * from networkx.algorithms.assortativity.correlation import * from networkx.algorithms.assortativity.mixing import * from networkx.algorithms.assortativity.neighbor_degree import * from networkx.algorithms.assortativity.pairs import * networkx-1.8.1/networkx/algorithms/assortativity/pairs.py0000664000175000017500000000725612177456333023756 0ustar aricaric00000000000000#-*- coding: utf-8 -*- """Generators of x-y pairs of node data.""" import networkx as nx from networkx.utils import dict_to_numpy_array __author__ = ' '.join(['Aric Hagberg ']) __all__ = ['node_attribute_xy', 'node_degree_xy'] def node_attribute_xy(G, attribute, nodes=None): """Return iterator of node-attribute pairs for all edges in G. Parameters ---------- G: NetworkX graph attribute: key The node attribute key. nodes: list or iterable (optional) Use only edges that are adjacency to specified nodes. The default is all nodes. Returns ------- (x,y): 2-tuple Generates 2-tuple of (attribute,attribute) values. Examples -------- >>> G = nx.DiGraph() >>> G.add_node(1,color='red') >>> G.add_node(2,color='blue') >>> G.add_edge(1,2) >>> list(nx.node_attribute_xy(G,'color')) [('red', 'blue')] Notes ----- For undirected graphs each edge is produced twice, once for each edge representation (u,v) and (v,u), with the exception of self-loop edges which only appear once. 
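    A rough sketch of the undirected case, for illustration only::

        G = nx.Graph()
        G.add_node(1, color='red')
        G.add_node(2, color='blue')
        G.add_edge(1, 2)
        # sorted(nx.node_attribute_xy(G, 'color')) == [('blue', 'red'), ('red', 'blue')]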
""" if nodes is None: nodes = set(G) else: nodes = set(nodes) node = G.node for u,nbrsdict in G.adjacency_iter(): if u not in nodes: continue uattr = node[u].get(attribute,None) if G.is_multigraph(): for v,keys in nbrsdict.items(): vattr = node[v].get(attribute,None) for k,d in keys.items(): yield (uattr,vattr) else: for v,eattr in nbrsdict.items(): vattr = node[v].get(attribute,None) yield (uattr,vattr) def node_degree_xy(G, x='out', y='in', weight=None, nodes=None): """Generate node degree-degree pairs for edges in G. Parameters ---------- G: NetworkX graph x: string ('in','out') The degree type for source node (directed graphs only). y: string ('in','out') The degree type for target node (directed graphs only). weight: string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. nodes: list or iterable (optional) Use only edges that are adjacency to specified nodes. The default is all nodes. Returns ------- (x,y): 2-tuple Generates 2-tuple of (degree,degree) values. Examples -------- >>> G = nx.DiGraph() >>> G.add_edge(1,2) >>> list(nx.node_degree_xy(G,x='out',y='in')) [(1, 1)] >>> list(nx.node_degree_xy(G,x='in',y='out')) [(0, 0)] Notes ----- For undirected graphs each edge is produced twice, once for each edge representation (u,v) and (v,u), with the exception of self-loop edges which only appear once. """ if nodes is None: nodes = set(G) else: nodes = set(nodes) xdeg = G.degree_iter ydeg = G.degree_iter if G.is_directed(): direction = {'out':G.out_degree_iter, 'in':G.in_degree_iter} xdeg = direction[x] ydeg = direction[y] for u,degu in xdeg(nodes, weight=weight): neighbors = (nbr for _,nbr in G.edges_iter(u) if nbr in nodes) for v,degv in ydeg(neighbors, weight=weight): yield degu,degv # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/algorithms/assortativity/tests/0000775000175000017500000000000012177457361023420 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/assortativity/tests/test_mixing.py0000664000175000017500000001414712177456333026331 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx as nx from base_test import BaseTestAttributeMixing,BaseTestDegreeMixing class TestDegreeMixingDict(BaseTestDegreeMixing): def test_degree_mixing_dict_undirected(self): d=nx.degree_mixing_dict(self.P4) d_result={1:{2:2}, 2:{1:2,2:2}, } assert_equal(d,d_result) def test_degree_mixing_dict_undirected_normalized(self): d=nx.degree_mixing_dict(self.P4, normalized=True) d_result={1:{2:1.0/3}, 2:{1:1.0/3,2:1.0/3}, } assert_equal(d,d_result) def test_degree_mixing_dict_directed(self): d=nx.degree_mixing_dict(self.D) print(d) d_result={1:{3:2}, 2:{1:1,3:1}, 3:{} } assert_equal(d,d_result) def test_degree_mixing_dict_multigraph(self): d=nx.degree_mixing_dict(self.M) d_result={1:{2:1}, 2:{1:1,3:3}, 3:{2:3} } assert_equal(d,d_result) class TestDegreeMixingMatrix(BaseTestDegreeMixing): @classmethod def setupClass(cls): global np global npt try: import numpy as np import numpy.testing as npt except ImportError: raise SkipTest('NumPy not available.') def test_degree_mixing_matrix_undirected(self): a_result=np.array([[0,0,0], [0,0,2], [0,2,2]] ) a=nx.degree_mixing_matrix(self.P4,normalized=False) 
npt.assert_equal(a,a_result) a=nx.degree_mixing_matrix(self.P4) npt.assert_equal(a,a_result/float(a_result.sum())) def test_degree_mixing_matrix_directed(self): a_result=np.array([[0,0,0,0], [0,0,0,2], [0,1,0,1], [0,0,0,0]] ) a=nx.degree_mixing_matrix(self.D,normalized=False) npt.assert_equal(a,a_result) a=nx.degree_mixing_matrix(self.D) npt.assert_equal(a,a_result/float(a_result.sum())) def test_degree_mixing_matrix_multigraph(self): a_result=np.array([[0,0,0,0], [0,0,1,0], [0,1,0,3], [0,0,3,0]] ) a=nx.degree_mixing_matrix(self.M,normalized=False) npt.assert_equal(a,a_result) a=nx.degree_mixing_matrix(self.M) npt.assert_equal(a,a_result/float(a_result.sum())) def test_degree_mixing_matrix_selfloop(self): a_result=np.array([[0,0,0], [0,0,0], [0,0,2]] ) a=nx.degree_mixing_matrix(self.S,normalized=False) npt.assert_equal(a,a_result) a=nx.degree_mixing_matrix(self.S) npt.assert_equal(a,a_result/float(a_result.sum())) class TestAttributeMixingDict(BaseTestAttributeMixing): def test_attribute_mixing_dict_undirected(self): d=nx.attribute_mixing_dict(self.G,'fish') d_result={'one':{'one':2,'red':1}, 'two':{'two':2,'blue':1}, 'red':{'one':1}, 'blue':{'two':1} } assert_equal(d,d_result) def test_attribute_mixing_dict_directed(self): d=nx.attribute_mixing_dict(self.D,'fish') d_result={'one':{'one':1,'red':1}, 'two':{'two':1,'blue':1}, 'red':{}, 'blue':{} } assert_equal(d,d_result) def test_attribute_mixing_dict_multigraph(self): d=nx.attribute_mixing_dict(self.M,'fish') d_result={'one':{'one':4}, 'two':{'two':2}, } assert_equal(d,d_result) class TestAttributeMixingMatrix(BaseTestAttributeMixing): @classmethod def setupClass(cls): global np global npt try: import numpy as np import numpy.testing as npt except ImportError: raise SkipTest('NumPy not available.') def test_attribute_mixing_matrix_undirected(self): mapping={'one':0,'two':1,'red':2,'blue':3} a_result=np.array([[2,0,1,0], [0,2,0,1], [1,0,0,0], [0,1,0,0]] ) a=nx.attribute_mixing_matrix(self.G,'fish', mapping=mapping, normalized=False) npt.assert_equal(a,a_result) a=nx.attribute_mixing_matrix(self.G,'fish', mapping=mapping) npt.assert_equal(a,a_result/float(a_result.sum())) def test_attribute_mixing_matrix_directed(self): mapping={'one':0,'two':1,'red':2,'blue':3} a_result=np.array([[1,0,1,0], [0,1,0,1], [0,0,0,0], [0,0,0,0]] ) a=nx.attribute_mixing_matrix(self.D,'fish', mapping=mapping, normalized=False) npt.assert_equal(a,a_result) a=nx.attribute_mixing_matrix(self.D,'fish', mapping=mapping) npt.assert_equal(a,a_result/float(a_result.sum())) def test_attribute_mixing_matrix_multigraph(self): mapping={'one':0,'two':1,'red':2,'blue':3} a_result=np.array([[4,0,0,0], [0,2,0,0], [0,0,0,0], [0,0,0,0]] ) a=nx.attribute_mixing_matrix(self.M,'fish', mapping=mapping, normalized=False) npt.assert_equal(a,a_result) a=nx.attribute_mixing_matrix(self.M,'fish', mapping=mapping) npt.assert_equal(a,a_result/float(a_result.sum())) networkx-1.8.1/networkx/algorithms/assortativity/tests/base_test.py0000664000175000017500000000272112177456333025743 0ustar aricaric00000000000000import networkx as nx class BaseTestAttributeMixing(object): def setUp(self): G=nx.Graph() G.add_nodes_from([0,1],fish='one') G.add_nodes_from([2,3],fish='two') G.add_nodes_from([4],fish='red') G.add_nodes_from([5],fish='blue') G.add_edges_from([(0,1),(2,3),(0,4),(2,5)]) self.G=G D=nx.DiGraph() D.add_nodes_from([0,1],fish='one') D.add_nodes_from([2,3],fish='two') D.add_nodes_from([4],fish='red') D.add_nodes_from([5],fish='blue') D.add_edges_from([(0,1),(2,3),(0,4),(2,5)]) self.D=D 
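        # The multigraph fixture built below deliberately doubles the (0, 1)
        # edge: the mixing routines report the attribute pair of that edge
        # once per parallel edge (and once per direction), which the
        # multigraph tests rely on.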
M=nx.MultiGraph() M.add_nodes_from([0,1],fish='one') M.add_nodes_from([2,3],fish='two') M.add_nodes_from([4],fish='red') M.add_nodes_from([5],fish='blue') M.add_edges_from([(0,1),(0,1),(2,3)]) self.M=M S=nx.Graph() S.add_nodes_from([0,1],fish='one') S.add_nodes_from([2,3],fish='two') S.add_nodes_from([4],fish='red') S.add_nodes_from([5],fish='blue') S.add_edge(0,0) S.add_edge(2,2) self.S=S class BaseTestDegreeMixing(object): def setUp(self): self.P4=nx.path_graph(4) self.D=nx.DiGraph() self.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)]) self.M=nx.MultiGraph() self.M.add_path(list(range(4))) self.M.add_edge(0,1) self.S=nx.Graph() self.S.add_edges_from([(0,0),(1,1)]) networkx-1.8.1/networkx/algorithms/assortativity/tests/test_pairs.py0000664000175000017500000000741112177456333026150 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from base_test import BaseTestAttributeMixing,BaseTestDegreeMixing class TestAttributeMixingXY(BaseTestAttributeMixing): def test_node_attribute_xy_undirected(self): attrxy=sorted(nx.node_attribute_xy(self.G,'fish')) attrxy_result=sorted([('one','one'), ('one','one'), ('two','two'), ('two','two'), ('one','red'), ('red','one'), ('blue','two'), ('two','blue') ]) assert_equal(attrxy,attrxy_result) def test_node_attribute_xy_undirected_nodes(self): attrxy=sorted(nx.node_attribute_xy(self.G,'fish', nodes=['one','yellow'])) attrxy_result=sorted( [ ]) assert_equal(attrxy,attrxy_result) def test_node_attribute_xy_directed(self): attrxy=sorted(nx.node_attribute_xy(self.D,'fish')) attrxy_result=sorted([('one','one'), ('two','two'), ('one','red'), ('two','blue') ]) assert_equal(attrxy,attrxy_result) def test_node_attribute_xy_multigraph(self): attrxy=sorted(nx.node_attribute_xy(self.M,'fish')) attrxy_result=[('one','one'), ('one','one'), ('one','one'), ('one','one'), ('two','two'), ('two','two') ] assert_equal(attrxy,attrxy_result) def test_node_attribute_xy_selfloop(self): attrxy=sorted(nx.node_attribute_xy(self.S,'fish')) attrxy_result=[('one','one'), ('two','two') ] assert_equal(attrxy,attrxy_result) class TestDegreeMixingXY(BaseTestDegreeMixing): def test_node_degree_xy_undirected(self): xy=sorted(nx.node_degree_xy(self.P4)) xy_result=sorted([(1,2), (2,1), (2,2), (2,2), (1,2), (2,1)]) assert_equal(xy,xy_result) def test_node_degree_xy_undirected_nodes(self): xy=sorted(nx.node_degree_xy(self.P4,nodes=[0,1,-1])) xy_result=sorted([(1,2), (2,1),]) assert_equal(xy,xy_result) def test_node_degree_xy_directed(self): xy=sorted(nx.node_degree_xy(self.D)) xy_result=sorted([(2,1), (2,3), (1,3), (1,3)]) assert_equal(xy,xy_result) def test_node_degree_xy_multigraph(self): xy=sorted(nx.node_degree_xy(self.M)) xy_result=sorted([(2,3), (2,3), (3,2), (3,2), (2,3), (3,2), (1,2), (2,1)]) assert_equal(xy,xy_result) def test_node_degree_xy_selfloop(self): xy=sorted(nx.node_degree_xy(self.S)) xy_result=sorted([(2,2), (2,2)]) assert_equal(xy,xy_result) def test_node_degree_xy_weighted(self): G = nx.Graph() G.add_edge(1,2,weight=7) G.add_edge(2,3,weight=10) xy=sorted(nx.node_degree_xy(G,weight='weight')) xy_result=sorted([(7,17), (17,10), (17,7), (10,17)]) assert_equal(xy,xy_result) networkx-1.8.1/networkx/algorithms/assortativity/tests/test_connectivity.py0000664000175000017500000001032312177456333027544 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestNeighborConnectivity(object): def test_degree_p4(self): G=nx.path_graph(4) answer={1:2.0,2:1.5} nd = nx.average_degree_connectivity(G) 
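        # For the path 0-1-2-3: each degree-1 end node has a single neighbor
        # of degree 2 (average 2.0), while each degree-2 inner node has
        # neighbors of degree 1 and 2 (average 1.5), matching the expected
        # answer {1: 2.0, 2: 1.5}.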
assert_equal(nd,answer) D=G.to_directed() answer={2:2.0,4:1.5} nd = nx.average_degree_connectivity(D) assert_equal(nd,answer) answer={1:2.0,2:1.5} D=G.to_directed() nd = nx.average_degree_connectivity(D, source='in', target='in') assert_equal(nd,answer) D=G.to_directed() nd = nx.average_degree_connectivity(D, source='in', target='in') assert_equal(nd,answer) def test_degree_p4_weighted(self): G=nx.path_graph(4) G[1][2]['weight']=4 answer={1:2.0,2:1.8} nd = nx.average_degree_connectivity(G,weight='weight') assert_equal(nd,answer) answer={1:2.0,2:1.5} nd = nx.average_degree_connectivity(G) assert_equal(nd,answer) D=G.to_directed() answer={2:2.0,4:1.8} nd = nx.average_degree_connectivity(D,weight='weight') assert_equal(nd,answer) answer={1:2.0,2:1.8} D=G.to_directed() nd = nx.average_degree_connectivity(D,weight='weight', source='in', target='in') assert_equal(nd,answer) D=G.to_directed() nd = nx.average_degree_connectivity(D,source='in',target='out', weight='weight') assert_equal(nd,answer) def test_weight_keyword(self): G=nx.path_graph(4) G[1][2]['other']=4 answer={1:2.0,2:1.8} nd = nx.average_degree_connectivity(G,weight='other') assert_equal(nd,answer) answer={1:2.0,2:1.5} nd = nx.average_degree_connectivity(G,weight=None) assert_equal(nd,answer) D=G.to_directed() answer={2:2.0,4:1.8} nd = nx.average_degree_connectivity(D,weight='other') assert_equal(nd,answer) answer={1:2.0,2:1.8} D=G.to_directed() nd = nx.average_degree_connectivity(D,weight='other', source='in', target='in') assert_equal(nd,answer) D=G.to_directed() nd = nx.average_degree_connectivity(D,weight='other',source='in', target='in') assert_equal(nd,answer) def test_degree_barrat(self): G=nx.star_graph(5) G.add_edges_from([(5,6),(5,7),(5,8),(5,9)]) G[0][5]['weight']=5 nd = nx.average_degree_connectivity(G)[5] assert_equal(nd,1.8) nd = nx.average_degree_connectivity(G,weight='weight')[5] assert_almost_equal(nd,3.222222,places=5) nd = nx.k_nearest_neighbors(G,weight='weight')[5] assert_almost_equal(nd,3.222222,places=5) def test_zero_deg(self): G=nx.DiGraph() G.add_edge(1,2) G.add_edge(1,3) G.add_edge(1,4) c = nx.average_degree_connectivity(G) assert_equal(c,{1:0,3:1}) c = nx.average_degree_connectivity(G, source='in', target='in') assert_equal(c,{0:0,1:0}) c = nx.average_degree_connectivity(G, source='in', target='out') assert_equal(c,{0:0,1:3}) c = nx.average_degree_connectivity(G, source='in', target='in+out') assert_equal(c,{0:0,1:3}) c = nx.average_degree_connectivity(G, source='out', target='out') assert_equal(c,{0:0,3:0}) c = nx.average_degree_connectivity(G, source='out', target='in') assert_equal(c,{0:0,3:1}) c = nx.average_degree_connectivity(G, source='out', target='in+out') assert_equal(c,{0:0,3:1}) def test_in_out_weight(self): from itertools import permutations G=nx.DiGraph() G.add_edge(1,2,weight=1) G.add_edge(1,3,weight=1) G.add_edge(3,1,weight=1) for s,t in permutations(['in','out','in+out'],2): c = nx.average_degree_connectivity(G, source=s, target=t) cw = nx.average_degree_connectivity(G,source=s, target=t, weight='weight') assert_equal(c,cw) networkx-1.8.1/networkx/algorithms/assortativity/tests/test_correlation.py0000664000175000017500000000631412177456333027354 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from nose import SkipTest import networkx as nx from base_test import BaseTestAttributeMixing,BaseTestDegreeMixing from networkx.algorithms.assortativity.correlation import attribute_ac class TestDegreeMixingCorrelation(BaseTestDegreeMixing): @classmethod def 
setupClass(cls): global np global npt try: import numpy as np import numpy.testing as npt except ImportError: raise SkipTest('NumPy not available.') try: import scipy import scipy.stats except ImportError: raise SkipTest('SciPy not available.') def test_degree_assortativity_undirected(self): r=nx.degree_assortativity_coefficient(self.P4) npt.assert_almost_equal(r,-1.0/2,decimal=4) def test_degree_assortativity_directed(self): r=nx.degree_assortativity_coefficient(self.D) npt.assert_almost_equal(r,-0.57735,decimal=4) def test_degree_assortativity_multigraph(self): r=nx.degree_assortativity_coefficient(self.M) npt.assert_almost_equal(r,-1.0/7.0,decimal=4) def test_degree_assortativity_undirected(self): r=nx.degree_pearson_correlation_coefficient(self.P4) npt.assert_almost_equal(r,-1.0/2,decimal=4) def test_degree_assortativity_directed(self): r=nx.degree_pearson_correlation_coefficient(self.D) npt.assert_almost_equal(r,-0.57735,decimal=4) def test_degree_assortativity_multigraph(self): r=nx.degree_pearson_correlation_coefficient(self.M) npt.assert_almost_equal(r,-1.0/7.0,decimal=4) class TestAttributeMixingCorrelation(BaseTestAttributeMixing): @classmethod def setupClass(cls): global np global npt try: import numpy as np import numpy.testing as npt except ImportError: raise SkipTest('NumPy not available.') def test_attribute_assortativity_undirected(self): r=nx.attribute_assortativity_coefficient(self.G,'fish') assert_equal(r,6.0/22.0) def test_attribute_assortativity_directed(self): r=nx.attribute_assortativity_coefficient(self.D,'fish') assert_equal(r,1.0/3.0) def test_attribute_assortativity_multigraph(self): r=nx.attribute_assortativity_coefficient(self.M,'fish') assert_equal(r,1.0) def test_attribute_assortativity_coefficient(self): # from "Mixing patterns in networks" a=np.array([[0.258,0.016,0.035,0.013], [0.012,0.157,0.058,0.019], [0.013,0.023,0.306,0.035], [0.005,0.007,0.024,0.016]]) r=attribute_ac(a) npt.assert_almost_equal(r,0.623,decimal=3) def test_attribute_assortativity_coefficient2(self): a=np.array([[0.18,0.02,0.01,0.03], [0.02,0.20,0.03,0.02], [0.01,0.03,0.16,0.01], [0.03,0.02,0.01,0.22]]) r=attribute_ac(a) npt.assert_almost_equal(r,0.68,decimal=2) def test_attribute_assortativity(self): a=np.array([[50,50,0],[50,50,0],[0,0,2]]) r=attribute_ac(a) npt.assert_almost_equal(r,0.029,decimal=3) networkx-1.8.1/networkx/algorithms/assortativity/tests/test_neighbor_degree.py0000664000175000017500000000462612177456333030147 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestAverageNeighbor(object): def test_degree_p4(self): G=nx.path_graph(4) answer={0:2,1:1.5,2:1.5,3:2} nd = nx.average_neighbor_degree(G) assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D) assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D) assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D, source='in', target='in') assert_equal(nd,answer) def test_degree_p4_weighted(self): G=nx.path_graph(4) G[1][2]['weight']=4 answer={0:2,1:1.8,2:1.8,3:2} nd = nx.average_neighbor_degree(G,weight='weight') assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D,weight='weight') assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D,weight='weight') assert_equal(nd,answer) nd = nx.average_neighbor_degree(D,source='out',target='out', weight='weight') assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D,source='in',target='in', 
weight='weight') assert_equal(nd,answer) def test_degree_k4(self): G=nx.complete_graph(4) answer={0:3,1:3,2:3,3:3} nd = nx.average_neighbor_degree(G) assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D) assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D) assert_equal(nd,answer) D=G.to_directed() nd = nx.average_neighbor_degree(D,source='in',target='in') assert_equal(nd,answer) def test_degree_k4_nodes(self): G=nx.complete_graph(4) answer={1:3.0,2:3.0} nd = nx.average_neighbor_degree(G,nodes=[1,2]) assert_equal(nd,answer) def test_degree_barrat(self): G=nx.star_graph(5) G.add_edges_from([(5,6),(5,7),(5,8),(5,9)]) G[0][5]['weight']=5 nd = nx.average_neighbor_degree(G)[5] assert_equal(nd,1.8) nd = nx.average_neighbor_degree(G,weight='weight')[5] assert_almost_equal(nd,3.222222,places=5) networkx-1.8.1/networkx/algorithms/assortativity/correlation.py0000664000175000017500000002060712177456333025154 0ustar aricaric00000000000000#-*- coding: utf-8 -*- """Node assortativity coefficients and correlation measures. """ import networkx as nx from networkx.algorithms.assortativity.mixing import degree_mixing_matrix, \ attribute_mixing_matrix, numeric_mixing_matrix from networkx.algorithms.assortativity.pairs import node_degree_xy, \ node_attribute_xy __author__ = ' '.join(['Aric Hagberg ', 'Oleguer Sagarra ']) __all__ = ['degree_pearson_correlation_coefficient', 'degree_assortativity_coefficient', 'attribute_assortativity_coefficient', 'numeric_assortativity_coefficient'] def degree_assortativity_coefficient(G, x='out', y='in', weight=None, nodes=None): """Compute degree assortativity of graph. Assortativity measures the similarity of connections in the graph with respect to the node degree. Parameters ---------- G : NetworkX graph x: string ('in','out') The degree type for source node (directed graphs only). y: string ('in','out') The degree type for target node (directed graphs only). weight: string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. nodes: list or iterable (optional) Compute degree assortativity only for nodes in container. The default is all nodes. Returns ------- r : float Assortativity of graph by degree. Examples -------- >>> G=nx.path_graph(4) >>> r=nx.degree_assortativity_coefficient(G) >>> print("%3.1f"%r) -0.5 See Also -------- attribute_assortativity_coefficient numeric_assortativity_coefficient neighbor_connectivity degree_mixing_dict degree_mixing_matrix Notes ----- This computes Eq. (21) in Ref. [1]_ , where e is the joint probability distribution (mixing matrix) of the degrees. If G is directed than the matrix e is the joint probability of the user-specified degree type for the source and target. References ---------- .. [1] M. E. J. Newman, Mixing patterns in networks, Physical Review E, 67 026126, 2003 .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. Edge direction and the structure of networks, PNAS 107, 10815-20 (2010). """ M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight) return numeric_ac(M) def degree_pearson_correlation_coefficient(G, x='out', y='in', weight=None, nodes=None): """Compute degree assortativity of graph. Assortativity measures the similarity of connections in the graph with respect to the node degree. This is the same as degree_assortativity_coefficient but uses the potentially faster scipy.stats.pearsonr function. 
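    Internally the endpoint degrees of every edge are paired up and handed to
    scipy.stats.pearsonr; roughly the following sketch, for illustration
    only::

        import scipy.stats

        xy = nx.node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
        xs, ys = zip(*xy)
        r = scipy.stats.pearsonr(xs, ys)[0]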
Parameters ---------- G : NetworkX graph x: string ('in','out') The degree type for source node (directed graphs only). y: string ('in','out') The degree type for target node (directed graphs only). weight: string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. The degree is the sum of the edge weights adjacent to the node. nodes: list or iterable (optional) Compute pearson correlation of degrees only for specified nodes. The default is all nodes. Returns ------- r : float Assortativity of graph by degree. Examples -------- >>> G=nx.path_graph(4) >>> r=nx.degree_pearson_correlation_coefficient(G) >>> r -0.5 Notes ----- This calls scipy.stats.pearsonr. References ---------- .. [1] M. E. J. Newman, Mixing patterns in networks Physical Review E, 67 026126, 2003 .. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M. Edge direction and the structure of networks, PNAS 107, 10815-20 (2010). """ try: import scipy.stats as stats except ImportError: raise ImportError( "Assortativity requires SciPy: http://scipy.org/ ") xy=node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight) x,y=zip(*xy) return stats.pearsonr(x,y)[0] def attribute_assortativity_coefficient(G,attribute,nodes=None): """Compute assortativity for node attributes. Assortativity measures the similarity of connections in the graph with respect to the given attribute. Parameters ---------- G : NetworkX graph attribute : string Node attribute key nodes: list or iterable (optional) Compute attribute assortativity for nodes in container. The default is all nodes. Returns ------- r: float Assortativity of graph for given attribute Examples -------- >>> G=nx.Graph() >>> G.add_nodes_from([0,1],color='red') >>> G.add_nodes_from([2,3],color='blue') >>> G.add_edges_from([(0,1),(2,3)]) >>> print(nx.attribute_assortativity_coefficient(G,'color')) 1.0 Notes ----- This computes Eq. (2) in Ref. [1]_ , trace(M)-sum(M))/(1-sum(M), where M is the joint probability distribution (mixing matrix) of the specified attribute. References ---------- .. [1] M. E. J. Newman, Mixing patterns in networks, Physical Review E, 67 026126, 2003 """ M = attribute_mixing_matrix(G,attribute,nodes) return attribute_ac(M) def numeric_assortativity_coefficient(G, attribute, nodes=None): """Compute assortativity for numerical node attributes. Assortativity measures the similarity of connections in the graph with respect to the given numeric attribute. Parameters ---------- G : NetworkX graph attribute : string Node attribute key nodes: list or iterable (optional) Compute numeric assortativity only for attributes of nodes in container. The default is all nodes. Returns ------- r: float Assortativity of graph for given attribute Examples -------- >>> G=nx.Graph() >>> G.add_nodes_from([0,1],size=2) >>> G.add_nodes_from([2,3],size=3) >>> G.add_edges_from([(0,1),(2,3)]) >>> print(nx.numeric_assortativity_coefficient(G,'size')) 1.0 Notes ----- This computes Eq. (21) in Ref. [1]_ , for the mixing matrix of of the specified attribute. References ---------- .. [1] M. E. J. Newman, Mixing patterns in networks Physical Review E, 67 026126, 2003 """ a = numeric_mixing_matrix(G,attribute,nodes) return numeric_ac(a) def attribute_ac(M): """Compute assortativity for attribute matrix M. Parameters ---------- M : numpy array or matrix Attribute mixing matrix. Notes ----- This computes Eq. (2) in Ref. 
[1]_ , (trace(e)-sum(e))/(1-sum(e)), where e is the joint probability distribution (mixing matrix) of the specified attribute. References ---------- .. [1] M. E. J. Newman, Mixing patterns in networks, Physical Review E, 67 026126, 2003 """ try: import numpy except ImportError: raise ImportError( "attribute_assortativity requires NumPy: http://scipy.org/ ") if M.sum() != 1.0: M=M/float(M.sum()) M=numpy.asmatrix(M) s=(M*M).sum() t=M.trace() r=(t-s)/(1-s) return float(r) def numeric_ac(M): # M is a numpy matrix or array # numeric assortativity coefficient, pearsonr try: import numpy except ImportError: raise ImportError('numeric_assortativity requires ', 'NumPy: http://scipy.org/') if M.sum() != 1.0: M=M/float(M.sum()) nx,ny=M.shape # nx=ny x=numpy.arange(nx) y=numpy.arange(ny) a=M.sum(axis=0) b=M.sum(axis=1) vara=(a*x**2).sum()-((a*x).sum())**2 varb=(b*x**2).sum()-((b*x).sum())**2 xy=numpy.outer(x,y) ab=numpy.outer(a,b) return (xy*(M-ab)).sum()/numpy.sqrt(vara*varb) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/algorithms/cluster.py0000664000175000017500000002532512177456333021371 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Algorithms to characterize the number of triangles in a graph.""" from itertools import combinations import networkx as nx from networkx import NetworkXError __author__ = """\n""".join(['Aric Hagberg ', 'Dan Schult (dschult@colgate.edu)', 'Pieter Swart (swart@lanl.gov)', 'Jordi Torrents ']) # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __all__= ['triangles', 'average_clustering', 'clustering', 'transitivity', 'square_clustering'] def triangles(G, nodes=None): """Compute the number of triangles. Finds the number of triangles that include a node as one vertex. Parameters ---------- G : graph A networkx graph nodes : container of nodes, optional (default= all nodes in G) Compute triangles for nodes in this container. Returns ------- out : dictionary Number of triangles keyed by node label. Examples -------- >>> G=nx.complete_graph(5) >>> print(nx.triangles(G,0)) 6 >>> print(nx.triangles(G)) {0: 6, 1: 6, 2: 6, 3: 6, 4: 6} >>> print(list(nx.triangles(G,(0,1)).values())) [6, 6] Notes ----- When computing triangles for the entire graph each triangle is counted three times, once at each node. Self loops are ignored. """ if G.is_directed(): raise NetworkXError("triangles() is not defined for directed graphs.") if nodes in G: # return single value return next(_triangles_and_degree_iter(G,nodes))[2] // 2 return dict( (v,t // 2) for v,d,t in _triangles_and_degree_iter(G,nodes)) def _triangles_and_degree_iter(G,nodes=None): """ Return an iterator of (node, degree, triangles). This double counts triangles so you may want to divide by 2. See degree() and triangles() for definitions and details. """ if G.is_multigraph(): raise NetworkXError("Not defined for multigraphs.") if nodes is None: nodes_nbrs = G.adj.items() else: nodes_nbrs= ( (n,G[n]) for n in G.nbunch_iter(nodes) ) for v,v_nbrs in nodes_nbrs: vs=set(v_nbrs)-set([v]) ntriangles=0 for w in vs: ws=set(G[w])-set([w]) ntriangles+=len(vs.intersection(ws)) yield (v,len(vs),ntriangles) def _weighted_triangles_and_degree_iter(G, nodes=None, weight='weight'): """ Return an iterator of (node, degree, weighted_triangles). Used for weighted clustering. 
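    Edge weights are first normalized by the maximum weight in the graph, and
    each triangle (i, j, k) then contributes the geometric mean of its three
    normalized weights, (w_ij * w_jk * w_ki)**(1.0 / 3.0), to the weighted
    triangle count of node i.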
""" if G.is_multigraph(): raise NetworkXError("Not defined for multigraphs.") if weight is None or G.edges()==[]: max_weight=1.0 else: max_weight=float(max(d.get(weight,1.0) for u,v,d in G.edges(data=True))) if nodes is None: nodes_nbrs = G.adj.items() else: nodes_nbrs= ( (n,G[n]) for n in G.nbunch_iter(nodes) ) for i,nbrs in nodes_nbrs: inbrs=set(nbrs)-set([i]) weighted_triangles=0.0 seen=set() for j in inbrs: wij=G[i][j].get(weight,1.0)/max_weight seen.add(j) jnbrs=set(G[j])-seen # this keeps from double counting for k in inbrs&jnbrs: wjk=G[j][k].get(weight,1.0)/max_weight wki=G[i][k].get(weight,1.0)/max_weight weighted_triangles+=(wij*wjk*wki)**(1.0/3.0) yield (i,len(inbrs),weighted_triangles*2) def average_clustering(G, nodes=None, weight=None, count_zeros=True): r"""Compute the average clustering coefficient for the graph G. The clustering coefficient for the graph is the average, .. math:: C = \frac{1}{n}\sum_{v \in G} c_v, where `n` is the number of nodes in `G`. Parameters ---------- G : graph nodes : container of nodes, optional (default=all nodes in G) Compute average clustering for nodes in this container. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. count_zeros : bool (default=False) If False include only the nodes with nonzero clustering in the average. Returns ------- avg : float Average clustering Examples -------- >>> G=nx.complete_graph(5) >>> print(nx.average_clustering(G)) 1.0 Notes ----- This is a space saving routine; it might be faster to use the clustering function to get a list and then take the average. Self loops are ignored. References ---------- .. [1] Generalizations of the clustering coefficient to weighted complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela, K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007). http://jponnela.com/web_documents/a9.pdf .. [2] Marcus Kaiser, Mean clustering coefficients: the role of isolated nodes and leafs on clustering measures for small-world networks. http://arxiv.org/abs/0802.2512 """ c=clustering(G,nodes,weight=weight).values() if not count_zeros: c = [v for v in c if v > 0] return sum(c)/float(len(c)) def clustering(G, nodes=None, weight=None): r"""Compute the clustering coefficient for nodes. For unweighted graphs, the clustering of a node `u` is the fraction of possible triangles through that node that exist, .. math:: c_u = \frac{2 T(u)}{deg(u)(deg(u)-1)}, where `T(u)` is the number of triangles through node `u` and `deg(u)` is the degree of `u`. For weighted graphs, the clustering is defined as the geometric average of the subgraph edge weights [1]_, .. math:: c_u = \frac{1}{deg(u)(deg(u)-1))} \sum_{uv} (\hat{w}_{uv} \hat{w}_{uw} \hat{w}_{vw})^{1/3}. The edge weights `\hat{w}_{uv}` are normalized by the maximum weight in the network `\hat{w}_{uv} = w_{uv}/\max(w)`. The value of `c_u` is assigned to 0 if `deg(u) < 2`. Parameters ---------- G : graph nodes : container of nodes, optional (default=all nodes in G) Compute clustering for nodes in this container. weight : string or None, optional (default=None) The edge attribute that holds the numerical value used as a weight. If None, then each edge has weight 1. Returns ------- out : float, or dictionary Clustering coefficient at specified nodes Examples -------- >>> G=nx.complete_graph(5) >>> print(nx.clustering(G,0)) 1.0 >>> print(nx.clustering(G)) {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0} Notes ----- Self loops are ignored. 
References ---------- .. [1] Generalizations of the clustering coefficient to weighted complex networks by J. Saramäki, M. Kivelä, J.-P. Onnela, K. Kaski, and J. Kertész, Physical Review E, 75 027105 (2007). http://jponnela.com/web_documents/a9.pdf """ if G.is_directed(): raise NetworkXError('Clustering algorithms are not defined ', 'for directed graphs.') if weight is not None: td_iter=_weighted_triangles_and_degree_iter(G,nodes,weight) else: td_iter=_triangles_and_degree_iter(G,nodes) clusterc={} for v,d,t in td_iter: if t==0: clusterc[v]=0.0 else: clusterc[v]=t/float(d*(d-1)) if nodes in G: return list(clusterc.values())[0] # return single value return clusterc def transitivity(G): r"""Compute graph transitivity, the fraction of all possible triangles present in G. Possible triangles are identified by the number of "triads" (two edges with a shared vertex). The transitivity is .. math:: T = 3\frac{\#triangles}{\#triads}. Parameters ---------- G : graph Returns ------- out : float Transitivity Examples -------- >>> G = nx.complete_graph(5) >>> print(nx.transitivity(G)) 1.0 """ triangles=0 # 6 times number of triangles contri=0 # 2 times number of connected triples for v,d,t in _triangles_and_degree_iter(G): contri += d*(d-1) triangles += t if triangles==0: # we had no triangles or possible triangles return 0.0 else: return triangles/float(contri) def square_clustering(G, nodes=None): r""" Compute the squares clustering coefficient for nodes. For each node return the fraction of possible squares that exist at the node [1]_ .. math:: C_4(v) = \frac{ \sum_{u=1}^{k_v} \sum_{w=u+1}^{k_v} q_v(u,w) }{ \sum_{u=1}^{k_v} \sum_{w=u+1}^{k_v} [a_v(u,w) + q_v(u,w)]}, where `q_v(u,w)` are the number of common neighbors of `u` and `w` other than `v` (ie squares), and `a_v(u,w) = (k_u - (1+q_v(u,w)+\theta_{uv}))(k_w - (1+q_v(u,w)+\theta_{uw}))`, where `\theta_{uw} = 1` if `u` and `w` are connected and 0 otherwise. Parameters ---------- G : graph nodes : container of nodes, optional (default=all nodes in G) Compute clustering for nodes in this container. Returns ------- c4 : dictionary A dictionary keyed by node with the square clustering coefficient value. Examples -------- >>> G=nx.complete_graph(5) >>> print(nx.square_clustering(G,0)) 1.0 >>> print(nx.square_clustering(G)) {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0} Notes ----- While `C_3(v)` (triangle clustering) gives the probability that two neighbors of node v are connected with each other, `C_4(v)` is the probability that two neighbors of node v share a common neighbor different from v. This algorithm can be applied to both bipartite and unipartite networks. References ---------- .. [1] Pedro G. Lind, Marta C. González, and Hans J. Herrmann. 2005 Cycles and clustering in bipartite networks. Physical Review E (72) 056127. 
""" if nodes is None: node_iter = G else: node_iter = G.nbunch_iter(nodes) clustering = {} for v in node_iter: clustering[v] = 0.0 potential=0 for u,w in combinations(G[v], 2): squares = len((set(G[u]) & set(G[w])) - set([v])) clustering[v] += squares degm = squares + 1.0 if w in G[u]: degm += 1 potential += (len(G[u]) - degm) * (len(G[w]) - degm) + squares if potential > 0: clustering[v] /= potential if nodes in G: return list(clustering.values())[0] # return single value return clustering networkx-1.8.1/networkx/algorithms/distance_regular.py0000664000175000017500000001244112177456333023216 0ustar aricaric00000000000000""" ======================= Distance-regular graphs ======================= """ # Copyright (C) 2011 by # Dheeraj M R # Aric Hagberg # All rights reserved. # BSD license. import networkx as nx __author__ = """\n""".join(['Dheeraj M R ', 'Aric Hagberg ']) __all__ = ['is_distance_regular','intersection_array','global_parameters'] def is_distance_regular(G): """Returns True if the graph is distance regular, False otherwise. A connected graph G is distance-regular if for any nodes x,y and any integers i,j=0,1,...,d (where d is the graph diameter), the number of vertices at distance i from x and distance j from y depends only on i,j and the graph distance between x and y, independently of the choice of x and y. Parameters ---------- G: Networkx graph (undirected) Returns ------- bool True if the graph is Distance Regular, False otherwise Examples -------- >>> G=nx.hypercube_graph(6) >>> nx.is_distance_regular(G) True See Also -------- intersection_array, global_parameters Notes ----- For undirected and simple graphs only References ---------- .. [1] Brouwer, A. E.; Cohen, A. M.; and Neumaier, A. Distance-Regular Graphs. New York: Springer-Verlag, 1989. .. [2] Weisstein, Eric W. "Distance-Regular Graph." http://mathworld.wolfram.com/Distance-RegularGraph.html """ try: a=intersection_array(G) return True except nx.NetworkXError: return False def global_parameters(b,c): """Return global parameters for a given intersection array. Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d such that for any 2 vertices x,y in G at a distance i=d(x,y), there are exactly c_i neighbors of y at a distance of i-1 from x and b_i neighbors of y at a distance of i+1 from x. Thus, a distance regular graph has the global parameters, [[c_0,a_0,b_0],[c_1,a_1,b_1],......,[c_d,a_d,b_d]] for the intersection array [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] where a_i+b_i+c_i=k , k= degree of every vertex. Parameters ---------- b,c: tuple of lists Returns ------- p : list of three-tuples Examples -------- >>> G=nx.dodecahedral_graph() >>> b,c=nx.intersection_array(G) >>> list(nx.global_parameters(b,c)) [(0, 0, 3), (1, 0, 2), (1, 1, 1), (1, 1, 1), (2, 0, 1), (3, 0, 0)] References ---------- .. [1] Weisstein, Eric W. "Global Parameters." From MathWorld--A Wolfram Web Resource. http://mathworld.wolfram.com/GlobalParameters.html See Also -------- intersection_array """ d=len(b) ba=b[:] ca=c[:] ba.append(0) ca.insert(0,0) k = ba[0] aa = [k-x-y for x,y in zip(ba,ca)] return zip(*[ca,aa,ba]) def intersection_array(G): """Returns the intersection array of a distance-regular graph. Given a distance-regular graph G with integers b_i, c_i,i = 0,....,d such that for any 2 vertices x,y in G at a distance i=d(x,y), there are exactly c_i neighbors of y at a distance of i-1 from x and b_i neighbors of y at a distance of i+1 from x. 
A distance regular graph'sintersection array is given by, [b_0,b_1,.....b_{d-1};c_1,c_2,.....c_d] Parameters ---------- G: Networkx graph (undirected) Returns ------- b,c: tuple of lists Examples -------- >>> G=nx.icosahedral_graph() >>> nx.intersection_array(G) ([5, 2, 1], [1, 2, 5]) References ---------- .. [1] Weisstein, Eric W. "Intersection Array." From MathWorld--A Wolfram Web Resource. http://mathworld.wolfram.com/IntersectionArray.html See Also -------- global_parameters """ if G.is_multigraph() or G.is_directed(): raise nx.NetworkxException('Not implemented for directed ', 'or multiedge graphs.') # test for regular graph (all degrees must be equal) degree = G.degree_iter() (_,k) = next(degree) for _,knext in degree: if knext != k: raise nx.NetworkXError('Graph is not distance regular.') k = knext path_length = nx.all_pairs_shortest_path_length(G) diameter = max([max(path_length[n].values()) for n in path_length]) bint = {} # 'b' intersection array cint = {} # 'c' intersection array for u in G: for v in G: try: i = path_length[u][v] except KeyError: # graph must be connected raise nx.NetworkXError('Graph is not distance regular.') # number of neighbors of v at a distance of i-1 from u c = len([n for n in G[v] if path_length[n][u]==i-1]) # number of neighbors of v at a distance of i+1 from u b = len([n for n in G[v] if path_length[n][u]==i+1]) # b,c are independent of u and v if cint.get(i,c) != c or bint.get(i,b) != b: raise nx.NetworkXError('Graph is not distance regular') bint[i] = b cint[i] = c return ([bint.get(i,0) for i in range(diameter)], [cint.get(i+1,0) for i in range(diameter)]) networkx-1.8.1/networkx/algorithms/approximation/0000775000175000017500000000000012177457361022223 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/approximation/ramsey.py0000664000175000017500000000155612177456333024102 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Ramsey numbers. """ # Copyright (C) 2011 by # Nicholas Mancuso # All rights reserved. # BSD license. import networkx as nx __all__ = ["ramsey_R2"] __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" def ramsey_R2(G): r"""Approximately computes the Ramsey number `R(2;s,t)` for graph. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- max_pair : (set, set) tuple Maximum clique, Maximum independent set. """ if not G: return (set([]), set([])) node = next(G.nodes_iter()) nbrs = nx.all_neighbors(G, node) nnbrs = nx.non_neighbors(G, node) c_1, i_1 = ramsey_R2(G.subgraph(nbrs)) c_2, i_2 = ramsey_R2(G.subgraph(nnbrs)) c_1.add(node) i_2.add(node) return (max([c_1, c_2]), max([i_1, i_2])) networkx-1.8.1/networkx/algorithms/approximation/__init__.py0000664000175000017500000000054312177456333024334 0ustar aricaric00000000000000from networkx.algorithms.approximation.clique import * from networkx.algorithms.approximation.dominating_set import * from networkx.algorithms.approximation.independent_set import * from networkx.algorithms.approximation.matching import * from networkx.algorithms.approximation.ramsey import * from networkx.algorithms.approximation.vertex_cover import * networkx-1.8.1/networkx/algorithms/approximation/matching.py0000664000175000017500000000246312177456333024372 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ************** Graph Matching ************** Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent edges; that is, no two edges share a common vertex. 
http://en.wikipedia.org/wiki/Matching_(graph_theory) """ # Copyright (C) 2011-2012 by # Nicholas Mancuso # All rights reserved. # BSD license. import networkx as nx __all__ = ["min_maximal_matching"] __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" def min_maximal_matching(G): r"""Returns the minimum maximal matching of G. That is, out of all maximal matchings of the graph G, the smallest is returned. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- min_maximal_matching : set Returns a set of edges such that no two edges share a common endpoint and every edge not in the set shares some common endpoint in the set. Cardinality will be 2*OPT in the worst case. Notes ----- The algorithm computes an approximate solution fo the minimum maximal cardinality matching problem. The solution is no more than 2 * OPT in size. Runtime is `O(|E|)`. References ---------- .. [1] Vazirani, Vijay Approximation Algorithms (2001) """ return nx.maximal_matching(G) networkx-1.8.1/networkx/algorithms/approximation/independent_set.py0000664000175000017500000000371512177456333025751 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Independent Set Independent set or stable set is a set of vertices in a graph, no two of which are adjacent. That is, it is a set I of vertices such that for every two vertices in I, there is no edge connecting the two. Equivalently, each edge in the graph has at most one endpoint in I. The size of an independent set is the number of vertices it contains. A maximum independent set is a largest independent set for a given graph G and its size is denoted α(G). The problem of finding such a set is called the maximum independent set problem and is an NP-hard optimization problem. As such, it is unlikely that there exists an efficient algorithm for finding a maximum independent set of a graph. http://en.wikipedia.org/wiki/Independent_set_(graph_theory) Independent set algorithm is based on the following paper: `O(|V|/(log|V|)^2)` apx of maximum clique/independent set. Boppana, R., & Halldórsson, M. M. (1992). Approximating maximum independent sets by excluding subgraphs. BIT Numerical Mathematics, 32(2), 180–196. Springer. doi:10.1007/BF01994876 """ # Copyright (C) 2011-2012 by # Nicholas Mancuso # All rights reserved. # BSD license. from networkx.algorithms.approximation import clique_removal __all__ = ["maximum_independent_set"] __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" def maximum_independent_set(G): """Return an approximate maximum independent set. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- iset : Set The apx-maximum independent set Notes ----- Finds the `O(|V|/(log|V|)^2)` apx of independent set in the worst case. References ---------- .. [1] Boppana, R., & Halldórsson, M. M. (1992). Approximating maximum independent sets by excluding subgraphs. BIT Numerical Mathematics, 32(2), 180–196. Springer. 
""" iset, _ = clique_removal(G) return iset networkx-1.8.1/networkx/algorithms/approximation/tests/0000775000175000017500000000000012177457361023365 5ustar aricaric00000000000000networkx-1.8.1/networkx/algorithms/approximation/tests/test_dominating_set.py0000664000175000017500000000340312177456333030000 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx import networkx.algorithms.approximation as apxa class TestMinWeightDominatingSet: def test_min_weighted_dominating_set(self): graph = nx.Graph() graph.add_edge(1, 2) graph.add_edge(1, 5) graph.add_edge(2, 3) graph.add_edge(2, 5) graph.add_edge(3, 4) graph.add_edge(3, 6) graph.add_edge(5, 6) vertices = set([1, 2, 3, 4, 5, 6]) # due to ties, this might be hard to test tight bounds dom_set = apxa.min_weighted_dominating_set(graph) for vertex in vertices - dom_set: neighbors = set(graph.neighbors(vertex)) ok_(len(neighbors & dom_set) > 0, "Non dominating set found!") def test_min_edge_dominating_set(self): graph = nx.path_graph(5) dom_set = apxa.min_edge_dominating_set(graph) # this is a crappy way to test, but good enough for now. for edge in graph.edges_iter(): if edge in dom_set: continue else: u, v = edge found = False for dom_edge in dom_set: found |= u == dom_edge[0] or u == dom_edge[1] ok_(found, "Non adjacent edge found!") graph = nx.complete_graph(10) dom_set = apxa.min_edge_dominating_set(graph) # this is a crappy way to test, but good enough for now. for edge in graph.edges_iter(): if edge in dom_set: continue else: u, v = edge found = False for dom_edge in dom_set: found |= u == dom_edge[0] or u == dom_edge[1] ok_(found, "Non adjacent edge found!") networkx-1.8.1/networkx/algorithms/approximation/tests/test_clique.py0000664000175000017500000000261412177456333026261 0ustar aricaric00000000000000from nose.tools import * import networkx as nx import networkx.algorithms.approximation as apxa def test_clique_removal(): graph = nx.complete_graph(10) i, cs = apxa.clique_removal(graph) idens = nx.density(graph.subgraph(i)) eq_(idens, 0.0, "i-set not found by clique_removal!") for clique in cs: cdens = nx.density(graph.subgraph(clique)) eq_(cdens, 1.0, "clique not found by clique_removal!") graph = nx.trivial_graph(nx.Graph()) i, cs = apxa.clique_removal(graph) idens = nx.density(graph.subgraph(i)) eq_(idens, 0.0, "i-set not found by ramsey!") # we should only have 1-cliques. Just singleton nodes. 
for clique in cs: cdens = nx.density(graph.subgraph(clique)) eq_(cdens, 0.0, "clique not found by clique_removal!") graph = nx.barbell_graph(10, 5, nx.Graph()) i, cs = apxa.clique_removal(graph) idens = nx.density(graph.subgraph(i)) eq_(idens, 0.0, "i-set not found by ramsey!") for clique in cs: cdens = nx.density(graph.subgraph(clique)) eq_(cdens, 1.0, "clique not found by clique_removal!") def test_max_clique_smoke(): # smoke test G = nx.Graph() assert_equal(len(apxa.max_clique(G)),0) def test_max_clique(): # create a complete graph graph = nx.complete_graph(30) # this should return the entire graph mc = apxa.max_clique(graph) assert_equals(30, len(mc)) networkx-1.8.1/networkx/algorithms/approximation/tests/test_ramsey.py0000664000175000017500000000175612177456333026305 0ustar aricaric00000000000000from nose.tools import * import networkx as nx import networkx.algorithms.approximation as apxa def test_ramsey(): # this should only find the complete graph graph = nx.complete_graph(10) c, i = apxa.ramsey_R2(graph) cdens = nx.density(graph.subgraph(c)) eq_(cdens, 1.0, "clique not found by ramsey!") idens = nx.density(graph.subgraph(i)) eq_(idens, 0.0, "i-set not found by ramsey!") # this trival graph has no cliques. should just find i-sets graph = nx.trivial_graph(nx.Graph()) c, i = apxa.ramsey_R2(graph) cdens = nx.density(graph.subgraph(c)) eq_(cdens, 0.0, "clique not found by ramsey!") idens = nx.density(graph.subgraph(i)) eq_(idens, 0.0, "i-set not found by ramsey!") graph = nx.barbell_graph(10, 5, nx.Graph()) c, i = apxa.ramsey_R2(graph) cdens = nx.density(graph.subgraph(c)) eq_(cdens, 1.0, "clique not found by ramsey!") idens = nx.density(graph.subgraph(i)) eq_(idens, 0.0, "i-set not found by ramsey!") networkx-1.8.1/networkx/algorithms/approximation/tests/test_independent_set.py0000664000175000017500000000032412177456333030143 0ustar aricaric00000000000000from nose.tools import * import networkx as nx import networkx.algorithms.approximation as a def test_independent_set(): # smoke test G = nx.Graph() assert_equal(len(a.maximum_independent_set(G)),0) networkx-1.8.1/networkx/algorithms/approximation/tests/test_vertex_cover.py0000664000175000017500000000213012177456333027503 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx from networkx.algorithms import approximation as a class TestMWVC: def test_min_vertex_cover(self): # create a simple star graph size = 50 sg = nx.star_graph(size) cover = a.min_weighted_vertex_cover(sg) assert_equals(2, len(cover)) for u, v in sg.edges_iter(): ok_((u in cover or v in cover), "Node node covered!") wg = nx.Graph() wg.add_node(0, weight=10) wg.add_node(1, weight=1) wg.add_node(2, weight=1) wg.add_node(3, weight=1) wg.add_node(4, weight=1) wg.add_edge(0, 1) wg.add_edge(0, 2) wg.add_edge(0, 3) wg.add_edge(0, 4) wg.add_edge(1,2) wg.add_edge(2,3) wg.add_edge(3,4) wg.add_edge(4,1) cover = a.min_weighted_vertex_cover(wg, weight="weight") csum = sum(wg.node[node]["weight"] for node in cover) assert_equals(4, csum) for u, v in wg.edges_iter(): ok_((u in cover or v in cover), "Node node covered!") networkx-1.8.1/networkx/algorithms/approximation/tests/test_matching.py0000664000175000017500000000032612177456333026567 0ustar aricaric00000000000000from nose.tools import * import networkx as nx import networkx.algorithms.approximation as a def test_min_maximal_matching(): # smoke test G = nx.Graph() assert_equal(len(a.min_maximal_matching(G)),0) 
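# A hypothetical extra check, not part of the original test suite: a
# maximal matching dominates every edge, so each edge of the path graph
# should share an endpoint with some matched edge.
def test_min_maximal_matching_path():
    G = nx.path_graph(5)
    matching = a.min_maximal_matching(G)
    assert_true(len(matching) > 0)
    covered = set()
    for u, v in matching:
        covered.add(u)
        covered.add(v)
    for u, v in G.edges_iter():
        ok_(u in covered or v in covered, "Edge not dominated!")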
networkx-1.8.1/networkx/algorithms/approximation/dominating_set.py0000664000175000017500000000677112177456333025612 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ************************************** Minimum Vertex and Edge Dominating Set ************************************** A dominating set for a graph G = (V, E) is a subset D of V such that every vertex not in D is joined to at least one member of D by some edge. The domination number gamma(G) is the number of vertices in a smallest dominating set for G. Given a graph G = (V, E) find a minimum weight dominating set V'. http://en.wikipedia.org/wiki/Dominating_set An edge dominating set for a graph G = (V, E) is a subset D of E such that every edge not in D is adjacent to at least one edge in D. http://en.wikipedia.org/wiki/Edge_dominating_set """ # Copyright (C) 2011-2012 by # Nicholas Mancuso # All rights reserved. # BSD license. import networkx as nx __all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"] __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" def min_weighted_dominating_set(G, weight=None): r"""Return minimum weight vertex dominating set. Parameters ---------- G : NetworkX graph Undirected graph weight : None or string, optional (default = None) If None, every edge has weight/distance/weight 1. If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. Returns ------- min_weight_dominating_set : set Returns a set of vertices whose weight sum is no more than log w(V) * OPT Notes ----- This algorithm computes an approximate minimum weighted dominating set for the graph G. The upper-bound on the size of the solution is log w(V) * OPT. Runtime of the algorithm is `O(|E|)`. References ---------- .. [1] Vazirani, Vijay Approximation Algorithms (2001) """ if not G: raise ValueError("Expected non-empty NetworkX graph!") # min cover = min dominating set dom_set = set([]) cost_func = dict((node, nd.get(weight, 1)) \ for node, nd in G.nodes_iter(data=True)) vertices = set(G) sets = dict((node, set([node]) | set(G[node])) for node in G) def _cost(subset): """ Our cost effectiveness function for sets given its weight """ cost = sum(cost_func[node] for node in subset) return cost / float(len(subset - dom_set)) while vertices: # find the most cost effective set, and the vertex that for that set dom_node, min_set = min(sets.items(), key=lambda x: (x[0], _cost(x[1]))) alpha = _cost(min_set) # reduce the cost for the rest for node in min_set - dom_set: cost_func[node] = alpha # add the node to the dominating set and reduce what we must cover dom_set.add(dom_node) del sets[dom_node] vertices = vertices - min_set return dom_set def min_edge_dominating_set(G): r"""Return minimum cardinality edge dominating set. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- min_edge_dominating_set : set Returns a set of dominating edges whose size is no more than 2 * OPT. Notes ----- The algorithm computes an approximate solution to the edge dominating set problem. The result is no more than 2 * OPT in terms of size of the set. Runtime of the algorithm is `O(|E|)`. """ if not G: raise ValueError("Expected non-empty NetworkX graph!") return nx.maximal_matching(G) networkx-1.8.1/networkx/algorithms/approximation/clique.py0000664000175000017500000000554412177456333024065 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Cliques. """ # Copyright (C) 2011-2012 by # Nicholas Mancuso # All rights reserved. # BSD license. 
import networkx as nx from networkx.algorithms.approximation import ramsey __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" __all__ = ["clique_removal","max_clique"] def max_clique(G): r"""Find the Maximum Clique Finds the `O(|V|/(log|V|)^2)` apx of maximum clique/independent set in the worst case. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- clique : set The apx-maximum clique of the graph Notes ------ A clique in an undirected graph G = (V, E) is a subset of the vertex set `C \subseteq V`, such that for every two vertices in C, there exists an edge connecting the two. This is equivalent to saying that the subgraph induced by C is complete (in some cases, the term clique may also refer to the subgraph). A maximum clique is a clique of the largest possible size in a given graph. The clique number `\omega(G)` of a graph G is the number of vertices in a maximum clique in G. The intersection number of G is the smallest number of cliques that together cover all edges of G. http://en.wikipedia.org/wiki/Maximum_clique References ---------- .. [1] Boppana, R., & Halldórsson, M. M. (1992). Approximating maximum independent sets by excluding subgraphs. BIT Numerical Mathematics, 32(2), 180–196. Springer. doi:10.1007/BF01994876 """ if G is None: raise ValueError("Expected NetworkX graph!") # finding the maximum clique in a graph is equivalent to finding # the independent set in the complementary graph cgraph = nx.complement(G) iset, _ = clique_removal(cgraph) return iset def clique_removal(G): """ Repeatedly remove cliques from the graph. Results in a `O(|V|/(\log |V|)^2)` approximation of maximum clique & independent set. Returns the largest independent set found, along with found maximal cliques. Parameters ---------- G : NetworkX graph Undirected graph Returns ------- max_ind_cliques : (set, list) tuple Maximal independent set and list of maximal cliques (sets) in the graph. References ---------- .. [1] Boppana, R., & Halldórsson, M. M. (1992). Approximating maximum independent sets by excluding subgraphs. BIT Numerical Mathematics, 32(2), 180–196. Springer. """ graph = G.copy() c_i, i_i = ramsey.ramsey_R2(graph) cliques = [c_i] isets = [i_i] while graph: graph.remove_nodes_from(c_i) c_i, i_i = ramsey.ramsey_R2(graph) if c_i: cliques.append(c_i) if i_i: isets.append(i_i) maxiset = max(isets) return maxiset, cliques networkx-1.8.1/networkx/algorithms/approximation/vertex_cover.py0000664000175000017500000000405012177456333025305 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ ************ Vertex Cover ************ Given an undirected graph `G = (V, E)` and a function w assigning nonnegative weights to its vertices, find a minimum weight subset of V such that each edge in E is incident to at least one vertex in the subset. http://en.wikipedia.org/wiki/Vertex_cover """ # Copyright (C) 2011-2012 by # Nicholas Mancuso # All rights reserved. # BSD license. from networkx.utils import * __all__ = ["min_weighted_vertex_cover"] __author__ = """Nicholas Mancuso (nick.mancuso@gmail.com)""" @not_implemented_for('directed') def min_weighted_vertex_cover(G, weight=None): r"""2-OPT Local Ratio for Minimum Weighted Vertex Cover Find an approximate minimum weighted vertex cover of a graph. Parameters ---------- G : NetworkX graph Undirected graph weight : None or string, optional (default = None) If None, every edge has weight/distance/cost 1. If a string, use this edge attribute as the edge weight. Any edge attribute not present defaults to 1. 
Returns ------- min_weighted_cover : set Returns a set of vertices whose weight sum is no more than 2 * OPT. Notes ----- Local-Ratio algorithm for computing an approximate vertex cover. Algorithm greedily reduces the costs over edges and iteratively builds a cover. Worst-case runtime is `O(|E|)`. References ---------- .. [1] Bar-Yehuda, R., & Even, S. (1985). A local-ratio theorem for approximating the weighted vertex cover problem. Annals of Discrete Mathematics, 25, 27–46 http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf """ weight_func = lambda nd: nd.get(weight, 1) cost = dict((n, weight_func(nd)) for n, nd in G.nodes(data=True)) # while there are edges uncovered, continue for u,v in G.edges_iter(): # select some uncovered edge min_cost = min([cost[u], cost[v]]) cost[u] -= min_cost cost[v] -= min_cost return set(u for u in cost if cost[u] == 0) networkx-1.8.1/networkx/drawing/0000775000175000017500000000000012177457361016613 5ustar aricaric00000000000000networkx-1.8.1/networkx/drawing/layout.py0000664000175000017500000003736712177456333020520 0ustar aricaric00000000000000""" ****** Layout ****** Node positioning algorithms for graph drawing. """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult(dschult@colgate.edu)""" __all__ = ['circular_layout', 'random_layout', 'shell_layout', 'spring_layout', 'spectral_layout', 'fruchterman_reingold_layout'] def random_layout(G,dim=2): """Position nodes uniformly at random in the unit square. For every node, a position is generated by choosing each of dim coordinates uniformly at random on the interval [0.0, 1.0). NumPy (http://scipy.org) is required for this function. Parameters ---------- G : NetworkX graph A position will be assigned to every node in G. dim : int Dimension of layout. Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G = nx.lollipop_graph(4, 3) >>> pos = nx.random_layout(G) """ try: import numpy as np except ImportError: raise ImportError("random_layout() requires numpy: http://scipy.org/ ") n=len(G) pos=np.asarray(np.random.random((n,dim)),dtype=np.float32) return dict(zip(G,pos)) def circular_layout(G, dim=2, scale=1): # dim=2 only """Position nodes on a circle. Parameters ---------- G : NetworkX graph dim : int Dimension of layout, currently only dim=2 is supported scale : float Scale factor for positions Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G=nx.path_graph(4) >>> pos=nx.circular_layout(G) Notes ------ This algorithm currently only works in two dimensions and does not try to minimize edge crossings. """ try: import numpy as np except ImportError: raise ImportError("circular_layout() requires numpy: http://scipy.org/ ") if len(G)==0: return {} if len(G)==1: return {G.nodes()[0]:(1,)*dim} t=np.arange(0,2.0*np.pi,2.0*np.pi/len(G),dtype=np.float32) pos=np.transpose(np.array([np.cos(t),np.sin(t)])) pos=_rescale_layout(pos,scale=scale) return dict(zip(G,pos)) def shell_layout(G,nlist=None,dim=2,scale=1): """Position nodes in concentric circles. Parameters ---------- G : NetworkX graph nlist : list of lists List of node lists for each shell. 
dim : int Dimension of layout, currently only dim=2 is supported scale : float Scale factor for positions Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G=nx.path_graph(4) >>> shells=[[0],[1,2,3]] >>> pos=nx.shell_layout(G,shells) Notes ------ This algorithm currently only works in two dimensions and does not try to minimize edge crossings. """ try: import numpy as np except ImportError: raise ImportError("shell_layout() requires numpy: http://scipy.org/ ") if len(G)==0: return {} if len(G)==1: return {G.nodes()[0]:(1,)*dim} if nlist==None: nlist=[G.nodes()] # draw the whole graph in one shell if len(nlist[0])==1: radius=0.0 # single node at center else: radius=1.0 # else start at r=1 npos={} for nodes in nlist: t=np.arange(0,2.0*np.pi,2.0*np.pi/len(nodes),dtype=np.float32) pos=np.transpose(np.array([radius*np.cos(t),radius*np.sin(t)])) npos.update(zip(nodes,pos)) radius+=1.0 # FIXME: rescale return npos def fruchterman_reingold_layout(G,dim=2,k=None, pos=None, fixed=None, iterations=50, weight='weight', scale=1.0): """Position nodes using Fruchterman-Reingold force-directed algorithm. Parameters ---------- G : NetworkX graph dim : int Dimension of layout k : float (default=None) Optimal distance between nodes. If None the distance is set to 1/sqrt(n) where n is the number of nodes. Increase this value to move nodes farther apart. pos : dict or None optional (default=None) Initial positions for nodes as a dictionary with node as keys and values as a list or tuple. If None, then nuse random initial positions. fixed : list or None optional (default=None) Nodes to keep fixed at initial position. iterations : int optional (default=50) Number of iterations of spring-force relaxation weight : string or None optional (default='weight') The edge attribute that holds the numerical value used for the edge weight. If None, then all edge weights are 1. scale : float (default=1.0) Scale factor for positions. The nodes are positioned in a box of size [0,scale] x [0,scale]. 
Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G=nx.path_graph(4) >>> pos=nx.spring_layout(G) # The same using longer function name >>> pos=nx.fruchterman_reingold_layout(G) """ try: import numpy as np except ImportError: raise ImportError("fruchterman_reingold_layout() requires numpy: http://scipy.org/ ") if fixed is not None: nfixed=dict(zip(G,range(len(G)))) fixed=np.asarray([nfixed[v] for v in fixed]) if pos is not None: pos_arr=np.asarray(np.random.random((len(G),dim))) for i,n in enumerate(G): if n in pos: pos_arr[i]=np.asarray(pos[n]) else: pos_arr=None if len(G)==0: return {} if len(G)==1: return {G.nodes()[0]:(1,)*dim} try: # Sparse matrix if len(G) < 500: # sparse solver for large graphs raise ValueError A=nx.to_scipy_sparse_matrix(G,weight=weight,dtype='f') pos=_sparse_fruchterman_reingold(A,dim,k,pos_arr,fixed,iterations) except: A=nx.to_numpy_matrix(G,weight=weight) pos=_fruchterman_reingold(A,dim,k,pos_arr,fixed,iterations) if fixed is None: pos=_rescale_layout(pos,scale=scale) return dict(zip(G,pos)) spring_layout=fruchterman_reingold_layout def _fruchterman_reingold(A, dim=2, k=None, pos=None, fixed=None, iterations=50): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() try: import numpy as np except ImportError: raise ImportError("_fruchterman_reingold() requires numpy: http://scipy.org/ ") try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError( "fruchterman_reingold() takes an adjacency matrix as input") A=np.asarray(A) # make sure we have an array instead of a matrix if pos==None: # random initial positions pos=np.asarray(np.random.random((nnodes,dim)),dtype=A.dtype) else: # make sure positions are of same type as matrix pos=pos.astype(A.dtype) # optimal distance between nodes if k is None: k=np.sqrt(1.0/nnodes) # the initial "temperature" is about .1 of domain area (=1x1) # this is the largest step allowed in the dynamics. t=0.1 # simple cooling scheme. # linearly step down by dt on each iteration so last iteration is size dt. 
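    # e.g. with t=0.1 and the default iterations=50 this gives
    # dt = 0.1/51 (about 0.002), so the maximum allowed step shrinks
    # linearly from 0.1 down to roughly dt on the final iteration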
dt=t/float(iterations+1) delta = np.zeros((pos.shape[0],pos.shape[0],pos.shape[1]),dtype=A.dtype) # the inscrutable (but fast) version # this is still O(V^2) # could use multilevel methods to speed this up significantly for iteration in range(iterations): # matrix of difference between points for i in range(pos.shape[1]): delta[:,:,i]= pos[:,i,None]-pos[:,i] # distance between points distance=np.sqrt((delta**2).sum(axis=-1)) # enforce minimum distance of 0.01 distance=np.where(distance<0.01,0.01,distance) # displacement "force" displacement=np.transpose(np.transpose(delta)*\ (k*k/distance**2-A*distance/k))\ .sum(axis=1) # update positions length=np.sqrt((displacement**2).sum(axis=1)) length=np.where(length<0.01,0.1,length) delta_pos=np.transpose(np.transpose(displacement)*t/length) if fixed is not None: # don't change positions of fixed nodes delta_pos[fixed]=0.0 pos+=delta_pos # cool temperature t-=dt pos=_rescale_layout(pos) return pos def _sparse_fruchterman_reingold(A, dim=2, k=None, pos=None, fixed=None, iterations=50): # Position nodes in adjacency matrix A using Fruchterman-Reingold # Entry point for NetworkX graph is fruchterman_reingold_layout() # Sparse version try: import numpy as np except ImportError: raise ImportError("_sparse_fruchterman_reingold() requires numpy: http://scipy.org/ ") try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError( "fruchterman_reingold() takes an adjacency matrix as input") try: from scipy.sparse import spdiags,coo_matrix except ImportError: raise ImportError("_sparse_fruchterman_reingold() scipy numpy: http://scipy.org/ ") # make sure we have a LIst of Lists representation try: A=A.tolil() except: A=(coo_matrix(A)).tolil() if pos==None: # random initial positions pos=np.asarray(np.random.random((nnodes,dim)),dtype=A.dtype) else: # make sure positions are of same type as matrix pos=pos.astype(A.dtype) # no fixed nodes if fixed==None: fixed=[] # optimal distance between nodes if k is None: k=np.sqrt(1.0/nnodes) # the initial "temperature" is about .1 of domain area (=1x1) # this is the largest step allowed in the dynamics. t=0.1 # simple cooling scheme. # linearly step down by dt on each iteration so last iteration is size dt. dt=t/float(iterations+1) displacement=np.zeros((dim,nnodes)) for iteration in range(iterations): displacement*=0 # loop over rows for i in range(A.shape[0]): if i in fixed: continue # difference between this row's node position and all others delta=(pos[i]-pos).T # distance between points distance=np.sqrt((delta**2).sum(axis=0)) # enforce minimum distance of 0.01 distance=np.where(distance<0.01,0.01,distance) # the adjacency matrix row Ai=np.asarray(A.getrowview(i).toarray()) # displacement "force" displacement[:,i]+=\ (delta*(k*k/distance**2-Ai*distance/k)).sum(axis=1) # update positions length=np.sqrt((displacement**2).sum(axis=0)) length=np.where(length<0.01,0.1,length) pos+=(displacement*t/length).T # cool temperature t-=dt pos=_rescale_layout(pos) return pos def spectral_layout(G, dim=2, weight='weight', scale=1): """Position nodes using the eigenvectors of the graph Laplacian. Parameters ---------- G : NetworkX graph dim : int Dimension of layout weight : string or None optional (default='weight') The edge attribute that holds the numerical value used for the edge weight. If None, then all edge weights are 1. 
scale : float Scale factor for positions Returns ------- dict : A dictionary of positions keyed by node Examples -------- >>> G=nx.path_graph(4) >>> pos=nx.spectral_layout(G) Notes ----- Directed graphs will be considered as unidrected graphs when positioning the nodes. For larger graphs (>500 nodes) this will use the SciPy sparse eigenvalue solver (ARPACK). """ # handle some special cases that break the eigensolvers try: import numpy as np except ImportError: raise ImportError("spectral_layout() requires numpy: http://scipy.org/ ") if len(G)<=2: if len(G)==0: pos=np.array([]) elif len(G)==1: pos=np.array([[1,1]]) else: pos=np.array([[0,0.5],[1,0.5]]) return dict(zip(G,pos)) try: # Sparse matrix if len(G)< 500: # dense solver is faster for small graphs raise ValueError A=nx.to_scipy_sparse_matrix(G, weight=weight,dtype='f') # Symmetrize directed graphs if G.is_directed(): A=A+np.transpose(A) pos=_sparse_spectral(A,dim) except (ImportError,ValueError): # Dense matrix A=nx.to_numpy_matrix(G, weight=weight) # Symmetrize directed graphs if G.is_directed(): A=A+np.transpose(A) pos=_spectral(A,dim) pos=_rescale_layout(pos,scale) return dict(zip(G,pos)) def _spectral(A, dim=2): # Input adjacency matrix A # Uses dense eigenvalue solver from numpy try: import numpy as np except ImportError: raise ImportError("spectral_layout() requires numpy: http://scipy.org/ ") try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError(\ "spectral() takes an adjacency matrix as input") # form Laplacian matrix # make sure we have an array instead of a matrix A=np.asarray(A) I=np.identity(nnodes,dtype=A.dtype) D=I*np.sum(A,axis=1) # diagonal of degrees L=D-A eigenvalues,eigenvectors=np.linalg.eig(L) # sort and keep smallest nonzero index=np.argsort(eigenvalues)[1:dim+1] # 0 index is zero eigenvalue return np.real(eigenvectors[:,index]) def _sparse_spectral(A,dim=2): # Input adjacency matrix A # Uses sparse eigenvalue solver from scipy # Could use multilevel methods here, see Koren "On spectral graph drawing" try: import numpy as np from scipy.sparse import spdiags except ImportError: raise ImportError("_sparse_spectral() requires scipy & numpy: http://scipy.org/ ") try: from scipy.sparse.linalg.eigen import eigsh except ImportError: # scipy <0.9.0 names eigsh differently from scipy.sparse.linalg import eigen_symmetric as eigsh try: nnodes,_=A.shape except AttributeError: raise nx.NetworkXError(\ "sparse_spectral() takes an adjacency matrix as input") # form Laplacian matrix data=np.asarray(A.sum(axis=1).T) D=spdiags(data,0,nnodes,nnodes) L=D-A k=dim+1 # number of Lanczos vectors for ARPACK solver.What is the right scaling? 
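    # heuristic: keep at least 2*k+1 Lanczos basis vectors, or sqrt(n)
    # for larger graphs, to help ARPACK converge on the smallest
    # eigenvalues of the Laplacian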
ncv=max(2*k+1,int(np.sqrt(nnodes))) # return smallest k eigenvalues and eigenvectors eigenvalues,eigenvectors=eigsh(L,k,which='SM',ncv=ncv) index=np.argsort(eigenvalues)[1:k] # 0 index is zero eigenvalue return np.real(eigenvectors[:,index]) def _rescale_layout(pos,scale=1): # rescale to (0,pscale) in all axes # shift origin to (0,0) lim=0 # max coordinate for all axes for i in range(pos.shape[1]): pos[:,i]-=pos[:,i].min() lim=max(pos[:,i].max(),lim) # rescale to (0,scale) in all directions, preserves aspect for i in range(pos.shape[1]): pos[:,i]*=scale/lim return pos # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/drawing/nx_pylab.py0000664000175000017500000006616112177456333021011 0ustar aricaric00000000000000""" ********** Matplotlib ********** Draw networks with matplotlib. See Also -------- matplotlib: http://matplotlib.sourceforge.net/ pygraphviz: http://networkx.lanl.gov/pygraphviz/ """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx from networkx.drawing.layout import shell_layout,\ circular_layout,spectral_layout,spring_layout,random_layout __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['draw', 'draw_networkx', 'draw_networkx_nodes', 'draw_networkx_edges', 'draw_networkx_labels', 'draw_networkx_edge_labels', 'draw_circular', 'draw_random', 'draw_spectral', 'draw_spring', 'draw_shell', 'draw_graphviz'] def draw(G, pos=None, ax=None, hold=None, **kwds): """Draw the graph G with Matplotlib. Draw the graph as a simple representation with no node labels or edge labels and using the full Matplotlib figure area and no axis labels by default. See draw_networkx() for more full-featured drawing that allows title, axis labels etc. Parameters ---------- G : graph A networkx graph pos : dictionary, optional A dictionary with nodes as keys and positions as values. If not specified a spring layout positioning will be computed. See networkx.layout for functions that compute node positions. ax : Matplotlib Axes object, optional Draw the graph in specified Matplotlib axes. hold : bool, optional Set the Matplotlib hold state. If True subsequent draw commands will be added to the current axes. **kwds : optional keywords See networkx.draw_networkx() for a description of optional keywords. Examples -------- >>> G=nx.dodecahedral_graph() >>> nx.draw(G) >>> nx.draw(G,pos=nx.spring_layout(G)) # use spring layout See Also -------- draw_networkx() draw_networkx_nodes() draw_networkx_edges() draw_networkx_labels() draw_networkx_edge_labels() Notes ----- This function has the same name as pylab.draw and pyplot.draw so beware when using >>> from networkx import * since you might overwrite the pylab.draw function. 
With pyplot use >>> import matplotlib.pyplot as plt >>> import networkx as nx >>> G=nx.dodecahedral_graph() >>> nx.draw(G) # networkx draw() >>> plt.draw() # pyplot draw() Also see the NetworkX drawing examples at http://networkx.lanl.gov/gallery.html """ try: import matplotlib.pyplot as plt except ImportError: raise ImportError("Matplotlib required for draw()") except RuntimeError: print("Matplotlib unable to open display") raise if ax is None: cf = plt.gcf() else: cf = ax.get_figure() cf.set_facecolor('w') if ax is None: if cf._axstack() is None: ax=cf.add_axes((0,0,1,1)) else: ax=cf.gca() # allow callers to override the hold state by passing hold=True|False b = plt.ishold() h = kwds.pop('hold', None) if h is not None: plt.hold(h) try: draw_networkx(G,pos=pos,ax=ax,**kwds) ax.set_axis_off() plt.draw_if_interactive() except: plt.hold(b) raise plt.hold(b) return def draw_networkx(G, pos=None, with_labels=True, **kwds): """Draw the graph G using Matplotlib. Draw the graph with Matplotlib with options for node positions, labeling, titles, and many other drawing features. See draw() for simple drawing without labels or axes. Parameters ---------- G : graph A networkx graph pos : dictionary, optional A dictionary with nodes as keys and positions as values. If not specified a spring layout positioning will be computed. See networkx.layout for functions that compute node positions. with_labels : bool, optional (default=True) Set to True to draw labels on the nodes. ax : Matplotlib Axes object, optional Draw the graph in the specified Matplotlib axes. nodelist : list, optional (default G.nodes()) Draw only specified nodes edgelist : list, optional (default=G.edges()) Draw only specified edges node_size : scalar or array, optional (default=300) Size of nodes. If an array is specified it must be the same length as nodelist. node_color : color string, or array of floats, (default='r') Node color. Can be a single color format string, or a sequence of colors with the same length as nodelist. If numeric values are specified they will be mapped to colors using the cmap and vmin,vmax parameters. See matplotlib.scatter for more details. node_shape : string, optional (default='o') The shape of the node. Specification is as matplotlib.scatter marker, one of 'so^>v>> G=nx.dodecahedral_graph() >>> nx.draw(G) >>> nx.draw(G,pos=nx.spring_layout(G)) # use spring layout >>> import matplotlib.pyplot as plt >>> limits=plt.axis('off') # turn of axis Also see the NetworkX drawing examples at http://networkx.lanl.gov/gallery.html See Also -------- draw() draw_networkx_nodes() draw_networkx_edges() draw_networkx_labels() draw_networkx_edge_labels() """ try: import matplotlib.pyplot as plt except ImportError: raise ImportError("Matplotlib required for draw()") except RuntimeError: print("Matplotlib unable to open display") raise if pos is None: pos=nx.drawing.spring_layout(G) # default to spring layout node_collection=draw_networkx_nodes(G, pos, **kwds) edge_collection=draw_networkx_edges(G, pos, **kwds) if with_labels: draw_networkx_labels(G, pos, **kwds) plt.draw_if_interactive() def draw_networkx_nodes(G, pos, nodelist=None, node_size=300, node_color='r', node_shape='o', alpha=1.0, cmap=None, vmin=None, vmax=None, ax=None, linewidths=None, label = None, **kwds): """Draw the nodes of the graph G. This draws only the nodes of the graph G. Parameters ---------- G : graph A networkx graph pos : dictionary A dictionary with nodes as keys and positions as values. 
If not specified a spring layout positioning will be computed. See networkx.layout for functions that compute node positions. ax : Matplotlib Axes object, optional Draw the graph in the specified Matplotlib axes. nodelist : list, optional Draw only specified nodes (default G.nodes()) node_size : scalar or array Size of nodes (default=300). If an array is specified it must be the same length as nodelist. node_color : color string, or array of floats Node color. Can be a single color format string (default='r'), or a sequence of colors with the same length as nodelist. If numeric values are specified they will be mapped to colors using the cmap and vmin,vmax parameters. See matplotlib.scatter for more details. node_shape : string The shape of the node. Specification is as matplotlib.scatter marker, one of 'so^>v>> G=nx.dodecahedral_graph() >>> nodes=nx.draw_networkx_nodes(G,pos=nx.spring_layout(G)) Also see the NetworkX drawing examples at http://networkx.lanl.gov/gallery.html See Also -------- draw() draw_networkx() draw_networkx_edges() draw_networkx_labels() draw_networkx_edge_labels() """ try: import matplotlib.pyplot as plt import numpy except ImportError: raise ImportError("Matplotlib required for draw()") except RuntimeError: print("Matplotlib unable to open display") raise if ax is None: ax=plt.gca() if nodelist is None: nodelist=G.nodes() if not nodelist or len(nodelist)==0: # empty nodelist, no drawing return None try: xy=numpy.asarray([pos[v] for v in nodelist]) except KeyError as e: raise nx.NetworkXError('Node %s has no position.'%e) except ValueError: raise nx.NetworkXError('Bad value in node positions.') node_collection=ax.scatter(xy[:,0], xy[:,1], s=node_size, c=node_color, marker=node_shape, cmap=cmap, vmin=vmin, vmax=vmax, alpha=alpha, linewidths=linewidths, label=label) node_collection.set_zorder(2) return node_collection def draw_networkx_edges(G, pos, edgelist=None, width=1.0, edge_color='k', style='solid', alpha=None, edge_cmap=None, edge_vmin=None, edge_vmax=None, ax=None, arrows=True, label=None, **kwds): """Draw the edges of the graph G. This draws only the edges of the graph G. Parameters ---------- G : graph A networkx graph pos : dictionary A dictionary with nodes as keys and positions as values. If not specified a spring layout positioning will be computed. See networkx.layout for functions that compute node positions. edgelist : collection of edge tuples Draw only specified edges(default=G.edges()) width : float Line width of edges (default =1.0) edge_color : color string, or array of floats Edge color. Can be a single color format string (default='r'), or a sequence of colors with the same length as edgelist. If numeric values are specified they will be mapped to colors using the edge_cmap and edge_vmin,edge_vmax parameters. style : string Edge line style (default='solid') (solid|dashed|dotted,dashdot) alpha : float The edge transparency (default=1.0) edge_ cmap : Matplotlib colormap Colormap for mapping intensities of edges (default=None) edge_vmin,edge_vmax : floats Minimum and maximum for edge colormap scaling (default=None) ax : Matplotlib Axes object, optional Draw the graph in the specified Matplotlib axes. arrows : bool, optional (default=True) For directed graphs, if True draw arrowheads. label : [None| string] Label for legend Notes ----- For directed graphs, "arrows" (actually just thicker stubs) are drawn at the head end. Arrows can be turned off with keyword arrows=False. Yes, it is ugly but drawing proper arrows with Matplotlib this way is tricky. 
Examples -------- >>> G=nx.dodecahedral_graph() >>> edges=nx.draw_networkx_edges(G,pos=nx.spring_layout(G)) Also see the NetworkX drawing examples at http://networkx.lanl.gov/gallery.html See Also -------- draw() draw_networkx() draw_networkx_nodes() draw_networkx_labels() draw_networkx_edge_labels() """ try: import matplotlib import matplotlib.pyplot as plt import matplotlib.cbook as cb from matplotlib.colors import colorConverter,Colormap from matplotlib.collections import LineCollection import numpy except ImportError: raise ImportError("Matplotlib required for draw()") except RuntimeError: print("Matplotlib unable to open display") raise if ax is None: ax=plt.gca() if edgelist is None: edgelist=G.edges() if not edgelist or len(edgelist)==0: # no edges! return None # set edge positions edge_pos=numpy.asarray([(pos[e[0]],pos[e[1]]) for e in edgelist]) if not cb.iterable(width): lw = (width,) else: lw = width if not cb.is_string_like(edge_color) \ and cb.iterable(edge_color) \ and len(edge_color)==len(edge_pos): if numpy.alltrue([cb.is_string_like(c) for c in edge_color]): # (should check ALL elements) # list of color letters such as ['k','r','k',...] edge_colors = tuple([colorConverter.to_rgba(c,alpha) for c in edge_color]) elif numpy.alltrue([not cb.is_string_like(c) for c in edge_color]): # If color specs are given as (rgb) or (rgba) tuples, we're OK if numpy.alltrue([cb.iterable(c) and len(c) in (3,4) for c in edge_color]): edge_colors = tuple(edge_color) else: # numbers (which are going to be mapped with a colormap) edge_colors = None else: raise ValueError('edge_color must consist of either color names or numbers') else: if cb.is_string_like(edge_color) or len(edge_color)==1: edge_colors = ( colorConverter.to_rgba(edge_color, alpha), ) else: raise ValueError('edge_color must be a single color or list of exactly m colors where m is the number or edges') edge_collection = LineCollection(edge_pos, colors = edge_colors, linewidths = lw, antialiaseds = (1,), linestyle = style, transOffset = ax.transData, ) edge_collection.set_zorder(1) # edges go behind nodes edge_collection.set_label(label) ax.add_collection(edge_collection) # Note: there was a bug in mpl regarding the handling of alpha values for # each line in a LineCollection. It was fixed in matplotlib in r7184 and # r7189 (June 6 2009). We should then not set the alpha value globally, # since the user can instead provide per-edge alphas now. Only set it # globally if provided as a scalar. 
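    # apply a global alpha only when a single numeric value was given;
    # a list of per-edge alphas fails cb.is_numlike() and is left alone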
if cb.is_numlike(alpha): edge_collection.set_alpha(alpha) if edge_colors is None: if edge_cmap is not None: assert(isinstance(edge_cmap, Colormap)) edge_collection.set_array(numpy.asarray(edge_color)) edge_collection.set_cmap(edge_cmap) if edge_vmin is not None or edge_vmax is not None: edge_collection.set_clim(edge_vmin, edge_vmax) else: edge_collection.autoscale() arrow_collection=None if G.is_directed() and arrows: # a directed graph hack # draw thick line segments at head end of edge # waiting for someone else to implement arrows that will work arrow_colors = edge_colors a_pos=[] p=1.0-0.25 # make head segment 25 percent of edge length for src,dst in edge_pos: x1,y1=src x2,y2=dst dx=x2-x1 # x offset dy=y2-y1 # y offset d=numpy.sqrt(float(dx**2+dy**2)) # length of edge if d==0: # source and target at same position continue if dx==0: # vertical edge xa=x2 ya=dy*p+y1 if dy==0: # horizontal edge ya=y2 xa=dx*p+x1 else: theta=numpy.arctan2(dy,dx) xa=p*d*numpy.cos(theta)+x1 ya=p*d*numpy.sin(theta)+y1 a_pos.append(((xa,ya),(x2,y2))) arrow_collection = LineCollection(a_pos, colors = arrow_colors, linewidths = [4*ww for ww in lw], antialiaseds = (1,), transOffset = ax.transData, ) arrow_collection.set_zorder(1) # edges go behind nodes arrow_collection.set_label(label) ax.add_collection(arrow_collection) # update view minx = numpy.amin(numpy.ravel(edge_pos[:,:,0])) maxx = numpy.amax(numpy.ravel(edge_pos[:,:,0])) miny = numpy.amin(numpy.ravel(edge_pos[:,:,1])) maxy = numpy.amax(numpy.ravel(edge_pos[:,:,1])) w = maxx-minx h = maxy-miny padx, pady = 0.05*w, 0.05*h corners = (minx-padx, miny-pady), (maxx+padx, maxy+pady) ax.update_datalim( corners) ax.autoscale_view() # if arrow_collection: return edge_collection def draw_networkx_labels(G, pos, labels=None, font_size=12, font_color='k', font_family='sans-serif', font_weight='normal', alpha=1.0, ax=None, **kwds): """Draw node labels on the graph G. Parameters ---------- G : graph A networkx graph pos : dictionary, optional A dictionary with nodes as keys and positions as values. If not specified a spring layout positioning will be computed. See networkx.layout for functions that compute node positions. labels : dictionary, optional (default=None) Node labels in a dictionary keyed by node of text labels font_size : int Font size for text labels (default=12) font_color : string Font color string (default='k' black) font_family : string Font family (default='sans-serif') font_weight : string Font weight (default='normal') alpha : float The text transparency (default=1.0) ax : Matplotlib Axes object, optional Draw the graph in the specified Matplotlib axes. 
Examples -------- >>> G=nx.dodecahedral_graph() >>> labels=nx.draw_networkx_labels(G,pos=nx.spring_layout(G)) Also see the NetworkX drawing examples at http://networkx.lanl.gov/gallery.html See Also -------- draw() draw_networkx() draw_networkx_nodes() draw_networkx_edges() draw_networkx_edge_labels() """ try: import matplotlib.pyplot as plt import matplotlib.cbook as cb except ImportError: raise ImportError("Matplotlib required for draw()") except RuntimeError: print("Matplotlib unable to open display") raise if ax is None: ax=plt.gca() if labels is None: labels=dict( (n,n) for n in G.nodes()) # set optional alignment horizontalalignment=kwds.get('horizontalalignment','center') verticalalignment=kwds.get('verticalalignment','center') text_items={} # there is no text collection so we'll fake one for n, label in labels.items(): (x,y)=pos[n] if not cb.is_string_like(label): label=str(label) # this will cause "1" and 1 to be labeled the same t=ax.text(x, y, label, size=font_size, color=font_color, family=font_family, weight=font_weight, horizontalalignment=horizontalalignment, verticalalignment=verticalalignment, transform = ax.transData, clip_on=True, ) text_items[n]=t return text_items def draw_networkx_edge_labels(G, pos, edge_labels=None, label_pos=0.5, font_size=10, font_color='k', font_family='sans-serif', font_weight='normal', alpha=1.0, bbox=None, ax=None, rotate=True, **kwds): """Draw edge labels. Parameters ---------- G : graph A networkx graph pos : dictionary, optional A dictionary with nodes as keys and positions as values. If not specified a spring layout positioning will be computed. See networkx.layout for functions that compute node positions. ax : Matplotlib Axes object, optional Draw the graph in the specified Matplotlib axes. alpha : float The text transparency (default=1.0) edge_labels : dictionary Edge labels in a dictionary keyed by edge two-tuple of text labels (default=None). Only labels for the keys in the dictionary are drawn. label_pos : float Position of edge label along edge (0=head, 0.5=center, 1=tail) font_size : int Font size for text labels (default=12) font_color : string Font color string (default='k' black) font_weight : string Font weight (default='normal') font_family : string Font family (default='sans-serif') bbox : Matplotlib bbox Specify text box shape and colors. 
clip_on : bool Turn on clipping at axis boundaries (default=True) Examples -------- >>> G=nx.dodecahedral_graph() >>> edge_labels=nx.draw_networkx_edge_labels(G,pos=nx.spring_layout(G)) Also see the NetworkX drawing examples at http://networkx.lanl.gov/gallery.html See Also -------- draw() draw_networkx() draw_networkx_nodes() draw_networkx_edges() draw_networkx_labels() """ try: import matplotlib.pyplot as plt import matplotlib.cbook as cb import numpy except ImportError: raise ImportError("Matplotlib required for draw()") except RuntimeError: print("Matplotlib unable to open display") raise if ax is None: ax=plt.gca() if edge_labels is None: labels=dict( ((u,v), d) for u,v,d in G.edges(data=True) ) else: labels = edge_labels text_items={} for (n1,n2), label in labels.items(): (x1,y1)=pos[n1] (x2,y2)=pos[n2] (x,y) = (x1 * label_pos + x2 * (1.0 - label_pos), y1 * label_pos + y2 * (1.0 - label_pos)) if rotate: angle=numpy.arctan2(y2-y1,x2-x1)/(2.0*numpy.pi)*360 # degrees # make label orientation "right-side-up" if angle > 90: angle-=180 if angle < - 90: angle+=180 # transform data coordinate angle to screen coordinate angle xy=numpy.array((x,y)) trans_angle=ax.transData.transform_angles(numpy.array((angle,)), xy.reshape((1,2)))[0] else: trans_angle=0.0 # use default box of white with white border if bbox is None: bbox = dict(boxstyle='round', ec=(1.0, 1.0, 1.0), fc=(1.0, 1.0, 1.0), ) if not cb.is_string_like(label): label=str(label) # this will cause "1" and 1 to be labeled the same # set optional alignment horizontalalignment=kwds.get('horizontalalignment','center') verticalalignment=kwds.get('verticalalignment','center') t=ax.text(x, y, label, size=font_size, color=font_color, family=font_family, weight=font_weight, horizontalalignment=horizontalalignment, verticalalignment=verticalalignment, rotation=trans_angle, transform = ax.transData, bbox = bbox, zorder = 1, clip_on=True, ) text_items[(n1,n2)]=t return text_items def draw_circular(G, **kwargs): """Draw the graph G with a circular layout.""" draw(G,circular_layout(G),**kwargs) def draw_random(G, **kwargs): """Draw the graph G with a random layout.""" draw(G,random_layout(G),**kwargs) def draw_spectral(G, **kwargs): """Draw the graph G with a spectral layout.""" draw(G,spectral_layout(G),**kwargs) def draw_spring(G, **kwargs): """Draw the graph G with a spring layout.""" draw(G,spring_layout(G),**kwargs) def draw_shell(G, **kwargs): """Draw networkx graph with shell layout.""" nlist = kwargs.get('nlist', None) if nlist != None: del(kwargs['nlist']) draw(G,shell_layout(G,nlist=nlist),**kwargs) def draw_graphviz(G, prog="neato", **kwargs): """Draw networkx graph with graphviz layout.""" pos=nx.drawing.graphviz_layout(G,prog) draw(G,pos,**kwargs) def draw_nx(G,pos,**kwds): """For backward compatibility; use draw or draw_networkx.""" draw(G,pos,**kwds) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import matplotlib as mpl mpl.use('PS',warn=False) import matplotlib.pyplot as plt except: raise SkipTest("matplotlib not available") networkx-1.8.1/networkx/drawing/__init__.py0000664000175000017500000000073212177456333020724 0ustar aricaric00000000000000# graph drawing and interface to graphviz import sys from networkx.drawing.layout import * from networkx.drawing.nx_pylab import * # graphviz interface # prefer pygraphviz/agraph (it's faster) from networkx.drawing.nx_agraph import * try: import pydot import networkx.drawing.nx_pydot from networkx.drawing.nx_pydot import * except ImportError: pass try: import 
pygraphviz from networkx.drawing.nx_agraph import * except ImportError: pass networkx-1.8.1/networkx/drawing/nx_agraph.py0000664000175000017500000003205712177456333021141 0ustar aricaric00000000000000""" *************** Graphviz AGraph *************** Interface to pygraphviz AGraph class. Examples -------- >>> G=nx.complete_graph(5) >>> A=nx.to_agraph(G) >>> H=nx.from_agraph(A) See Also -------- Pygraphviz: http://networkx.lanl.gov/pygraphviz """ # Copyright (C) 2004-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import os import sys import tempfile import networkx as nx __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __all__ = ['from_agraph', 'to_agraph', 'write_dot', 'read_dot', 'graphviz_layout', 'pygraphviz_layout', 'view_pygraphviz'] def from_agraph(A,create_using=None): """Return a NetworkX Graph or DiGraph from a PyGraphviz graph. Parameters ---------- A : PyGraphviz AGraph A graph created with PyGraphviz create_using : NetworkX graph class instance The output is created using the given graph class instance Examples -------- >>> K5=nx.complete_graph(5) >>> A=nx.to_agraph(K5) >>> G=nx.from_agraph(A) >>> G=nx.from_agraph(A) Notes ----- The Graph G will have a dictionary G.graph_attr containing the default graphviz attributes for graphs, nodes and edges. Default node attributes will be in the dictionary G.node_attr which is keyed by node. Edge attributes will be returned as edge data in G. With edge_attr=False the edge data will be the Graphviz edge weight attribute or the value 1 if no edge weight attribute is found. """ if create_using is None: if A.is_directed(): if A.is_strict(): create_using=nx.DiGraph() else: create_using=nx.MultiDiGraph() else: if A.is_strict(): create_using=nx.Graph() else: create_using=nx.MultiGraph() # assign defaults N=nx.empty_graph(0,create_using) N.name='' if A.name is not None: N.name=A.name # add nodes, attributes to N.node_attr for n in A.nodes(): str_attr=dict((str(k),v) for k,v in n.attr.items()) N.add_node(str(n),**str_attr) # add edges, assign edge data as dictionary of attributes for e in A.edges(): u,v=str(e[0]),str(e[1]) attr=dict(e.attr) str_attr=dict((str(k),v) for k,v in attr.items()) if not N.is_multigraph(): if e.name is not None: str_attr['key']=e.name N.add_edge(u,v,**str_attr) else: N.add_edge(u,v,key=e.name,**str_attr) # add default attributes for graph, nodes, and edges # hang them on N.graph_attr N.graph['graph']=dict(A.graph_attr) N.graph['node']=dict(A.node_attr) N.graph['edge']=dict(A.edge_attr) return N def to_agraph(N): """Return a pygraphviz graph from a NetworkX graph N. Parameters ---------- N : NetworkX graph A graph created with NetworkX Examples -------- >>> K5=nx.complete_graph(5) >>> A=nx.to_agraph(K5) Notes ----- If N has an dict N.graph_attr an attempt will be made first to copy properties attached to the graph (see from_agraph) and then updated with the calling arguments if any. 
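    For example (a minimal sketch; the attribute names shown are
    illustrative Graphviz attributes, not required keys):

    >>> G = nx.Graph(name='K3')
    >>> G.graph['graph'] = {'rankdir': 'LR'}
    >>> G.graph['node'] = {'shape': 'circle'}
    >>> A = nx.to_agraph(G)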
""" try: import pygraphviz except ImportError: raise ImportError('requires pygraphviz ', 'http://networkx.lanl.gov/pygraphviz ', '(not available for Python3)') directed=N.is_directed() strict=N.number_of_selfloops()==0 and not N.is_multigraph() A=pygraphviz.AGraph(name=N.name,strict=strict,directed=directed) # default graph attributes A.graph_attr.update(N.graph.get('graph',{})) A.node_attr.update(N.graph.get('node',{})) A.edge_attr.update(N.graph.get('edge',{})) # add nodes for n,nodedata in N.nodes(data=True): A.add_node(n,**nodedata) # loop over edges if N.is_multigraph(): for u,v,key,edgedata in N.edges_iter(data=True,keys=True): str_edgedata=dict((k,str(v)) for k,v in edgedata.items()) A.add_edge(u,v,key=str(key),**str_edgedata) else: for u,v,edgedata in N.edges_iter(data=True): str_edgedata=dict((k,str(v)) for k,v in edgedata.items()) A.add_edge(u,v,**str_edgedata) return A def write_dot(G,path): """Write NetworkX graph G to Graphviz dot format on path. Parameters ---------- G : graph A networkx graph path : filename Filename or file handle to write """ try: import pygraphviz except ImportError: raise ImportError('requires pygraphviz ', 'http://networkx.lanl.gov/pygraphviz ', '(not available for Python3)') A=to_agraph(G) A.write(path) A.clear() return def read_dot(path): """Return a NetworkX graph from a dot file on path. Parameters ---------- path : file or string File name or file handle to read. """ try: import pygraphviz except ImportError: raise ImportError('read_dot() requires pygraphviz ', 'http://networkx.lanl.gov/pygraphviz ', '(not available for Python3)') A=pygraphviz.AGraph(file=path) return from_agraph(A) def graphviz_layout(G,prog='neato',root=None, args=''): """Create node positions for G using Graphviz. Parameters ---------- G : NetworkX graph A graph created with NetworkX prog : string Name of Graphviz layout program root : string, optional Root node for twopi layout args : string, optional Extra arguments to Graphviz layout program Returns : dictionary Dictionary of x,y, positions keyed by node. Examples -------- >>> G=nx.petersen_graph() >>> pos=nx.graphviz_layout(G) >>> pos=nx.graphviz_layout(G,prog='dot') Notes ----- This is a wrapper for pygraphviz_layout. """ return pygraphviz_layout(G,prog=prog,root=root,args=args) def pygraphviz_layout(G,prog='neato',root=None, args=''): """Create node positions for G using Graphviz. Parameters ---------- G : NetworkX graph A graph created with NetworkX prog : string Name of Graphviz layout program root : string, optional Root node for twopi layout args : string, optional Extra arguments to Graphviz layout program Returns : dictionary Dictionary of x,y, positions keyed by node. Examples -------- >>> G=nx.petersen_graph() >>> pos=nx.graphviz_layout(G) >>> pos=nx.graphviz_layout(G,prog='dot') """ try: import pygraphviz except ImportError: raise ImportError('requires pygraphviz ', 'http://networkx.lanl.gov/pygraphviz ', '(not available for Python3)') if root is not None: args+="-Groot=%s"%root A=to_agraph(G) A.layout(prog=prog,args=args) node_pos={} for n in G: node=pygraphviz.Node(A,n) try: xx,yy=node.attr["pos"].split(',') node_pos[n]=(float(xx),float(yy)) except: print("no position for node",n) node_pos[n]=(0.0,0.0) return node_pos @nx.utils.open_file(5, 'w') def view_pygraphviz(G, edgelabel=None, prog='dot', args='', suffix='', path=None): """Views the graph G using the specified layout algorithm. Parameters ---------- G : NetworkX graph The machine to draw. 
edgelabel : str, callable, None If a string, then it specifes the edge attribute to be displayed on the edge labels. If a callable, then it is called for each edge and it should return the string to be displayed on the edges. The function signature of `edgelabel` should be edgelabel(data), where `data` is the edge attribute dictionary. prog : string Name of Graphviz layout program. args : str Additional arguments to pass to the Graphviz layout program. suffix : str If `filename` is None, we save to a temporary file. The value of `suffix` will appear at the tail end of the temporary filename. path : str, None The filename used to save the image. If None, save to a temporary file. File formats are the same as those from pygraphviz.agraph.draw. Returns ------- path : str The filename of the generated image. A : PyGraphviz graph The PyGraphviz graph instance used to generate the image. Notes ----- If this function is called in succession too quickly, sometimes the image is not displayed. So you might consider time.sleep(.5) between calls if you experience problems. """ if not len(G): raise nx.NetworkXException("An empty graph cannot be drawn.") import pygraphviz # If we are providing default values for graphviz, these must be set # before any nodes or edges are added to the PyGraphviz graph object. # The reason for this is that default values only affect incoming objects. # If you change the default values after the objects have been added, # then they inherit no value and are set only if explicitly set. # to_agraph() uses these values. attrs = ['edge', 'node', 'graph'] for attr in attrs: if attr not in G.graph: G.graph[attr] = {} # These are the default values. edge_attrs = {'fontsize': '10'} node_attrs = {'style': 'filled', 'fillcolor': '#0000FF40', 'height': '0.75', 'width': '0.75', 'shape': 'circle'} graph_attrs = {} def update_attrs(which, attrs): # Update graph attributes. Return list of those which were added. added = [] for k,v in attrs.items(): if k not in G.graph[which]: G.graph[which][k] = v added.append(k) def clean_attrs(which, added): # Remove added attributes for attr in added: del G.graph[which][attr] if not G.graph[which]: del G.graph[which] # Update all default values update_attrs('edge', edge_attrs) update_attrs('node', node_attrs) update_attrs('graph', graph_attrs) # Convert to agraph, so we inherit default values A = to_agraph(G) # Remove the default values we added to the original graph. clean_attrs('edge', edge_attrs) clean_attrs('node', node_attrs) clean_attrs('graph', graph_attrs) # If the user passed in an edgelabel, we update the labels for all edges. if edgelabel is not None: if not hasattr(edgelabel, '__call__'): def func(data): return ''.join([" ", str(data[edgelabel]), " "]) else: func = edgelabel # update all the edge labels if G.is_multigraph(): for u,v,key,data in G.edges_iter(keys=True, data=True): # PyGraphviz doesn't convert the key to a string. See #339 edge = A.get_edge(u,v,str(key)) edge.attr['label'] = str(func(data)) else: for u,v,data in G.edges_iter(data=True): edge = A.get_edge(u,v) edge.attr['label'] = str(func(data)) if path is None: ext = 'png' if suffix: suffix = '_%s.%s' % (suffix, ext) else: suffix = '.%s' % (ext,) path = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) else: # Assume the decorator worked and it is a file-object. 
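        # For example (an added sketch, not code executed here), a caller that
        # supplies its own filename ends up in this branch:
        #
        #     G = nx.path_graph(3)
        #     for u, v in G.edges():
        #         G[u][v]['weight'] = u + v
        #     filename, A = nx.drawing.nx_agraph.view_pygraphviz(
        #         G, edgelabel='weight', path='graph.png')
        #
        # where 'graph.png' is an illustrative name; the open_file decorator
        # has already opened it for writing, and `A` is the PyGraphviz graph
        # used to produce the image.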
pass display_pygraphviz(A, path=path, prog=prog, args=args) return path.name, A def display_pygraphviz(graph, path, format=None, prog=None, args=''): """Internal function to display a graph in OS dependent manner. Parameters ---------- graph : PyGraphviz graph A PyGraphviz AGraph instance. path : file object An already opened file object that will be closed. format : str, None An attempt is made to guess the output format based on the extension of the filename. If that fails, the value of `format` is used. prog : string Name of Graphviz layout program. args : str Additional arguments to pass to the Graphviz layout program. Notes ----- If this function is called in succession too quickly, sometimes the image is not displayed. So you might consider time.sleep(.5) between calls if you experience problems. """ if format is None: filename = path.name format = os.path.splitext(filename)[1].lower()[1:] if not format: # Let the draw() function use its default format = None # Save to a file and display in the default viewer. # We must close the file before viewing it. graph.draw(path, format, prog, args) path.close() nx.utils.default_opener(filename) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import pygraphviz except: raise SkipTest("pygraphviz not available") networkx-1.8.1/networkx/drawing/tests/0000775000175000017500000000000012177457361017755 5ustar aricaric00000000000000networkx-1.8.1/networkx/drawing/tests/test_agraph.py0000664000175000017500000000346612177456333022637 0ustar aricaric00000000000000"""Unit tests for PyGraphviz intefaace. """ import os import tempfile from nose import SkipTest from nose.tools import assert_true,assert_equal import networkx as nx class TestAGraph(object): @classmethod def setupClass(cls): global pygraphviz try: import pygraphviz except ImportError: raise SkipTest('PyGraphviz not available.') def build_graph(self, G): G.add_edge('A','B') G.add_edge('A','C') G.add_edge('A','C') G.add_edge('B','C') G.add_edge('A','D') G.add_node('E') return G def assert_equal(self, G1, G2): assert_true( sorted(G1.nodes())==sorted(G2.nodes()) ) assert_true( sorted(G1.edges())==sorted(G2.edges()) ) def agraph_checks(self, G): G = self.build_graph(G) A=nx.to_agraph(G) H=nx.from_agraph(A) self.assert_equal(G, H) fname=tempfile.mktemp() nx.drawing.nx_agraph.write_dot(H,fname) Hin=nx.drawing.nx_agraph.read_dot(fname) os.unlink(fname) self.assert_equal(H,Hin) (fd,fname)=tempfile.mkstemp() fh=open(fname,'w') nx.drawing.nx_agraph.write_dot(H,fh) fh.close() fh=open(fname,'r') Hin=nx.drawing.nx_agraph.read_dot(fh) fh.close() os.unlink(fname) self.assert_equal(H,Hin) def test_from_agraph_name(self): G=nx.Graph(name='test') A=nx.to_agraph(G) H=nx.from_agraph(A) assert_equal(G.name,'test') def testUndirected(self): self.agraph_checks(nx.Graph()) def testDirected(self): self.agraph_checks(nx.DiGraph()) def testMultiUndirected(self): self.agraph_checks(nx.MultiGraph()) def testMultiDirected(self): self.agraph_checks(nx.MultiDiGraph()) networkx-1.8.1/networkx/drawing/tests/test_layout.py0000664000175000017500000000351612177456333022706 0ustar aricaric00000000000000"""Unit tests for layout functions.""" import sys from nose import SkipTest from nose.tools import assert_equal import networkx as nx class TestLayout(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global numpy try: import numpy except ImportError: raise SkipTest('numpy not available.') def setUp(self): self.Gi=nx.grid_2d_graph(5,5) 
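        # (added note) Gs below gets string node labels -- add_path('abcdef')
        # iterates the characters -- and bigG is larger than 500 nodes, so
        # spectral_layout takes its sparse code path.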
self.Gs=nx.Graph() self.Gs.add_path('abcdef') self.bigG=nx.grid_2d_graph(25,25) #bigger than 500 nodes for sparse def test_smoke_int(self): G=self.Gi vpos=nx.random_layout(G) vpos=nx.circular_layout(G) vpos=nx.spring_layout(G) vpos=nx.fruchterman_reingold_layout(G) vpos=nx.spectral_layout(G) vpos=nx.spectral_layout(self.bigG) vpos=nx.shell_layout(G) def test_smoke_string(self): G=self.Gs vpos=nx.random_layout(G) vpos=nx.circular_layout(G) vpos=nx.spring_layout(G) vpos=nx.fruchterman_reingold_layout(G) vpos=nx.spectral_layout(G) vpos=nx.shell_layout(G) def test_adjacency_interface_numpy(self): A=nx.to_numpy_matrix(self.Gs) pos=nx.drawing.layout._fruchterman_reingold(A) pos=nx.drawing.layout._fruchterman_reingold(A,dim=3) assert_equal(pos.shape,(6,3)) def test_adjacency_interface_scipy(self): try: import scipy except ImportError: raise SkipTest('scipy not available.') A=nx.to_scipy_sparse_matrix(self.Gs,dtype='f') pos=nx.drawing.layout._sparse_fruchterman_reingold(A) pos=nx.drawing.layout._sparse_spectral(A) pos=nx.drawing.layout._sparse_fruchterman_reingold(A,dim=3) assert_equal(pos.shape,(6,3)) networkx-1.8.1/networkx/drawing/tests/test_pydot.py0000664000175000017500000000321112177456333022520 0ustar aricaric00000000000000""" Unit tests for pydot drawing functions. """ import os import tempfile from nose import SkipTest from nose.tools import assert_true import networkx as nx class TestPydot(object): @classmethod def setupClass(cls): global pydot try: import pydot import dot_parser except ImportError: raise SkipTest('pydot not available.') def build_graph(self, G): G.add_edge('A','B') G.add_edge('A','C') G.add_edge('B','C') G.add_edge('A','D') G.add_node('E') return G, nx.to_pydot(G) def assert_equal(self, G1, G2): assert_true( sorted(G1.nodes())==sorted(G2.nodes()) ) assert_true( sorted(G1.edges())==sorted(G2.edges()) ) def pydot_checks(self, G): H, P = self.build_graph(G) G2 = H.__class__(nx.from_pydot(P)) self.assert_equal(H, G2) fname = tempfile.mktemp() assert_true( P.write_raw(fname) ) Pin = pydot.graph_from_dot_file(fname) n1 = sorted([p.get_name() for p in P.get_node_list()]) n2 = sorted([p.get_name() for p in Pin.get_node_list()]) assert_true( n1 == n2 ) e1=[(e.get_source(),e.get_destination()) for e in P.get_edge_list()] e2=[(e.get_source(),e.get_destination()) for e in Pin.get_edge_list()] assert_true( sorted(e1)==sorted(e2) ) Hin = nx.drawing.nx_pydot.read_dot(fname) Hin = H.__class__(Hin) self.assert_equal(H, Hin) # os.unlink(fname) def testUndirected(self): self.pydot_checks(nx.Graph()) def testDirected(self): self.pydot_checks(nx.DiGraph()) networkx-1.8.1/networkx/drawing/tests/test_pylab.py0000664000175000017500000000165512177456333022502 0ustar aricaric00000000000000""" Unit tests for matplotlib drawing functions. 
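These are smoke tests: they only check that the drawing calls run and can
write output. The pattern they exercise is roughly (a sketch, assuming
matplotlib with a file-writing backend such as 'PS'):

    nx.draw_spring(nx.barbell_graph(5, 10))
    plt.savefig('test.ps')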
""" import os from nose import SkipTest import networkx as nx class TestPylab(object): @classmethod def setupClass(cls): global plt try: import matplotlib as mpl mpl.use('PS',warn=False) import matplotlib.pyplot as plt except ImportError: raise SkipTest('matplotlib not available.') except RuntimeError: raise SkipTest('matplotlib not available.') def setUp(self): self.G=nx.barbell_graph(5,10) def test_draw(self): N=self.G nx.draw_spring(N) plt.savefig("test.ps") nx.draw_random(N) plt.savefig("test.ps") nx.draw_circular(N) plt.savefig("test.ps") nx.draw_spectral(N) plt.savefig("test.ps") nx.draw_spring(N.to_directed()) plt.savefig("test.ps") os.unlink('test.ps') networkx-1.8.1/networkx/drawing/nx_pydot.py0000664000175000017500000001715212177456333021035 0ustar aricaric00000000000000""" ***** Pydot ***** Import and export NetworkX graphs in Graphviz dot format using pydot. Either this module or nx_pygraphviz can be used to interface with graphviz. See Also -------- Pydot: http://code.google.com/p/pydot/ Graphviz: http://www.research.att.com/sw/tools/graphviz/ DOT Language: http://www.graphviz.org/doc/info/lang.html """ # Copyright (C) 2004-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx.utils import open_file, make_str import networkx as nx __author__ = """Aric Hagberg (aric.hagberg@gmail.com)""" __all__ = ['write_dot', 'read_dot', 'graphviz_layout', 'pydot_layout', 'to_pydot', 'from_pydot'] @open_file(1,mode='w') def write_dot(G,path): """Write NetworkX graph G to Graphviz dot format on path. Path can be a string or a file handle. """ try: import pydot except ImportError: raise ImportError("write_dot() requires pydot", "http://code.google.com/p/pydot/") P=to_pydot(G) path.write(P.to_string()) return @open_file(0,mode='r') def read_dot(path): """Return a NetworkX MultiGraph or MultiDiGraph from a dot file on path. Parameters ---------- path : filename or file handle Returns ------- G : NetworkX multigraph A MultiGraph or MultiDiGraph. Notes ----- Use G=nx.Graph(nx.read_dot(path)) to return a Graph instead of a MultiGraph. """ try: import pydot except ImportError: raise ImportError("read_dot() requires pydot", "http://code.google.com/p/pydot/") data=path.read() P=pydot.graph_from_dot_data(data) return from_pydot(P) def from_pydot(P): """Return a NetworkX graph from a Pydot graph. Parameters ---------- P : Pydot graph A graph created with Pydot Returns ------- G : NetworkX multigraph A MultiGraph or MultiDiGraph. 
Examples -------- >>> K5=nx.complete_graph(5) >>> A=nx.to_pydot(K5) >>> G=nx.from_pydot(A) # return MultiGraph >>> G=nx.Graph(nx.from_pydot(A)) # make a Graph instead of MultiGraph """ if P.get_strict(None): # pydot bug: get_strict() shouldn't take argument multiedges=False else: multiedges=True if P.get_type()=='graph': # undirected if multiedges: create_using=nx.MultiGraph() else: create_using=nx.Graph() else: if multiedges: create_using=nx.MultiDiGraph() else: create_using=nx.DiGraph() # assign defaults N=nx.empty_graph(0,create_using) N.name=P.get_name() # add nodes, attributes to N.node_attr for p in P.get_node_list(): n=p.get_name().strip('"') if n in ('node','graph','edge'): continue N.add_node(n,**p.get_attributes()) # add edges for e in P.get_edge_list(): u=e.get_source().strip('"') v=e.get_destination().strip('"') attr=e.get_attributes() N.add_edge(u,v,**attr) # add default attributes for graph, nodes, edges N.graph['graph']=P.get_attributes() try: N.graph['node']=P.get_node_defaults()[0] except:# IndexError,TypeError: N.graph['node']={} try: N.graph['edge']=P.get_edge_defaults()[0] except:# IndexError,TypeError: N.graph['edge']={} return N def to_pydot(N, strict=True): """Return a pydot graph from a NetworkX graph N. Parameters ---------- N : NetworkX graph A graph created with NetworkX Examples -------- >>> K5=nx.complete_graph(5) >>> P=nx.to_pydot(K5) Notes ----- """ try: import pydot except ImportError: raise ImportError('to_pydot() requires pydot: ' 'http://code.google.com/p/pydot/') # set Graphviz graph type if N.is_directed(): graph_type='digraph' else: graph_type='graph' strict=N.number_of_selfloops()==0 and not N.is_multigraph() name = N.graph.get('name') graph_defaults=N.graph.get('graph',{}) if name is None: P = pydot.Dot(graph_type=graph_type,strict=strict,**graph_defaults) else: P = pydot.Dot('"%s"'%name,graph_type=graph_type,strict=strict, **graph_defaults) try: P.set_node_defaults(**N.graph['node']) except KeyError: pass try: P.set_edge_defaults(**N.graph['edge']) except KeyError: pass for n,nodedata in N.nodes_iter(data=True): str_nodedata=dict((k,make_str(v)) for k,v in nodedata.items()) p=pydot.Node(make_str(n),**str_nodedata) P.add_node(p) if N.is_multigraph(): for u,v,key,edgedata in N.edges_iter(data=True,keys=True): str_edgedata=dict((k,make_str(v)) for k,v in edgedata.items()) edge=pydot.Edge(make_str(u),make_str(v),key=make_str(key),**str_edgedata) P.add_edge(edge) else: for u,v,edgedata in N.edges_iter(data=True): str_edgedata=dict((k,make_str(v)) for k,v in edgedata.items()) edge=pydot.Edge(make_str(u),make_str(v),**str_edgedata) P.add_edge(edge) return P def pydot_from_networkx(N): """Create a Pydot graph from a NetworkX graph.""" from warnings import warn warn('pydot_from_networkx is replaced by to_pydot', DeprecationWarning) return to_pydot(N) def networkx_from_pydot(D, create_using=None): """Create a NetworkX graph from a Pydot graph.""" from warnings import warn warn('networkx_from_pydot is replaced by from_pydot', DeprecationWarning) return from_pydot(D) def graphviz_layout(G,prog='neato',root=None, **kwds): """Create node positions using Pydot and Graphviz. Returns a dictionary of positions keyed by node. Examples -------- >>> G=nx.complete_graph(4) >>> pos=nx.graphviz_layout(G) >>> pos=nx.graphviz_layout(G,prog='dot') Notes ----- This is a wrapper for pydot_layout. """ return pydot_layout(G=G,prog=prog,root=root,**kwds) def pydot_layout(G,prog='neato',root=None, **kwds): """Create node positions using Pydot and Graphviz. 
Returns a dictionary of positions keyed by node. Examples -------- >>> G=nx.complete_graph(4) >>> pos=nx.pydot_layout(G) >>> pos=nx.pydot_layout(G,prog='dot') """ try: import pydot except ImportError: raise ImportError('pydot_layout() requires pydot ', 'http://code.google.com/p/pydot/') P=to_pydot(G) if root is not None : P.set("root",make_str(root)) D=P.create_dot(prog=prog) if D=="": # no data returned print("Graphviz layout with %s failed"%(prog)) print() print("To debug what happened try:") print("P=pydot_from_networkx(G)") print("P.write_dot(\"file.dot\")") print("And then run %s on file.dot"%(prog)) return Q=pydot.graph_from_dot_data(D) node_pos={} for n in G.nodes(): pydot_node = pydot.Node(make_str(n)).get_name().encode('utf-8') node=Q.get_node(pydot_node) if isinstance(node,list): node=node[0] pos=node.get_pos()[1:-1] # strip leading and trailing double quotes if pos != None: xx,yy=pos.split(",") node_pos[n]=(float(xx),float(yy)) return node_pos # fixture for nose tests def setup_module(module): from nose import SkipTest try: import pydot import dot_parser except: raise SkipTest("pydot not available") networkx-1.8.1/networkx/relabel.py0000664000175000017500000001557712177456333017155 0ustar aricaric00000000000000# Copyright (C) 2006-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = """\n""".join(['Aric Hagberg ', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult (dschult@colgate.edu)']) __all__ = ['convert_node_labels_to_integers', 'relabel_nodes'] def relabel_nodes(G, mapping, copy=True): """Relabel the nodes of the graph G. Parameters ---------- G : graph A NetworkX graph mapping : dictionary A dictionary with the old labels as keys and new labels as values. A partial mapping is allowed. copy : bool (optional, default=True) If True return a copy, or if False relabel the nodes in place. Examples -------- >>> G=nx.path_graph(3) # nodes 0-1-2 >>> mapping={0:'a',1:'b',2:'c'} >>> H=nx.relabel_nodes(G,mapping) >>> print(sorted(H.nodes())) ['a', 'b', 'c'] >>> G=nx.path_graph(26) # nodes 0..25 >>> mapping=dict(zip(G.nodes(),"abcdefghijklmnopqrstuvwxyz")) >>> H=nx.relabel_nodes(G,mapping) # nodes a..z >>> mapping=dict(zip(G.nodes(),range(1,27))) >>> G1=nx.relabel_nodes(G,mapping) # nodes 1..26 Partial in-place mapping: >>> G=nx.path_graph(3) # nodes 0-1-2 >>> mapping={0:'a',1:'b'} # 0->'a' and 1->'b' >>> G=nx.relabel_nodes(G,mapping, copy=False) print(G.nodes()) [2, 'b', 'a'] Mapping as function: >>> G=nx.path_graph(3) >>> def mapping(x): ... return x**2 >>> H=nx.relabel_nodes(G,mapping) >>> print(H.nodes()) [0, 1, 4] Notes ----- Only the nodes specified in the mapping will be relabeled. The keyword setting copy=False modifies the graph in place. This is not always possible if the mapping is circular. In that case use copy=True. See Also -------- convert_node_labels_to_integers """ # you can pass a function f(old_label)->new_label # but we'll just make a dictionary here regardless if not hasattr(mapping,"__getitem__"): m = dict((n,mapping(n)) for n in G) else: m=mapping if copy: return _relabel_copy(G,m) else: return _relabel_inplace(G,m) def _relabel_inplace(G, mapping): old_labels=set(mapping.keys()) new_labels=set(mapping.values()) if len(old_labels & new_labels) > 0: # labels sets overlap # can we topological sort and still do the relabeling? 
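        # (added explanation) For example, with mapping {0: 1, 1: 2},
        # relabeling 0 first would clobber the existing node 1, but doing
        # 1->2 before 0->1 is safe. The digraph of the mapping (edges 0->1,
        # 1->2) encodes that order: process nodes in the reverse of a
        # topological sort. A truly circular mapping such as {0: 1, 1: 0}
        # makes this digraph cyclic, so no order works and we tell the user
        # to use copy=True below.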
D=nx.DiGraph(list(mapping.items())) D.remove_edges_from(D.selfloop_edges()) try: nodes=nx.topological_sort(D) except nx.NetworkXUnfeasible: raise nx.NetworkXUnfeasible('The node label sets are overlapping ' 'and no ordering can resolve the ' 'mapping. Use copy=True.') nodes.reverse() # reverse topological order else: # non-overlapping label sets nodes=old_labels multigraph = G.is_multigraph() directed = G.is_directed() for old in nodes: try: new=mapping[old] except KeyError: continue try: G.add_node(new,attr_dict=G.node[old]) except KeyError: raise KeyError("Node %s is not in the graph"%old) if multigraph: new_edges=[(new,old == target and new or target,key,data) for (_,target,key,data) in G.edges(old,data=True,keys=True)] if directed: new_edges+=[(old == source and new or source,new,key,data) for (source,_,key,data) in G.in_edges(old,data=True,keys=True)] else: new_edges=[(new,old == target and new or target,data) for (_,target,data) in G.edges(old,data=True)] if directed: new_edges+=[(old == source and new or source,new,data) for (source,_,data) in G.in_edges(old,data=True)] G.remove_node(old) G.add_edges_from(new_edges) return G def _relabel_copy(G, mapping): H=G.__class__() H.name="(%s)" % G.name if G.is_multigraph(): H.add_edges_from( (mapping.get(n1,n1),mapping.get(n2,n2),k,d.copy()) for (n1,n2,k,d) in G.edges_iter(keys=True,data=True)) else: H.add_edges_from( (mapping.get(n1,n1),mapping.get(n2,n2),d.copy()) for (n1,n2,d) in G.edges_iter(data=True)) H.add_nodes_from(mapping.get(n,n) for n in G) H.node.update(dict((mapping.get(n,n),d.copy()) for n,d in G.node.items())) H.graph.update(G.graph.copy()) return H def convert_node_labels_to_integers(G, first_label=0, ordering="default", label_attribute=None): """Return a copy of the graph G with the nodes relabeled with integers. Parameters ---------- G : graph A NetworkX graph first_label : int, optional (default=0) An integer specifying the offset in numbering nodes. The n new integer labels are numbered first_label, ..., n-1+first_label. ordering : string "default" : inherit node ordering from G.nodes() "sorted" : inherit node ordering from sorted(G.nodes()) "increasing degree" : nodes are sorted by increasing degree "decreasing degree" : nodes are sorted by decreasing degree label_attribute : string, optional (default=None) Name of node attribute to store old label. If None no attribute is created. Notes ----- Node and edge attribute data are copied to the new (relabeled) graph. 
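    A small added example (a sketch) of the `ordering` and `label_attribute`
    options:

    >>> G = nx.Graph([('a', 'b'), ('b', 'c')])
    >>> H = nx.convert_node_labels_to_integers(G, first_label=1, ordering='sorted', label_attribute='old')
    >>> sorted(H.nodes())
    [1, 2, 3]
    >>> H.node[2]['old']
    'b'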
See Also -------- relabel_nodes """ N = G.number_of_nodes()+first_label if ordering == "default": mapping = dict(zip(G.nodes(),range(first_label,N))) elif ordering == "sorted": nlist = G.nodes() nlist.sort() mapping=dict(zip(nlist,range(first_label,N))) elif ordering == "increasing degree": dv_pairs=[(d,n) for (n,d) in G.degree_iter()] dv_pairs.sort() # in-place sort from lowest to highest degree mapping = dict(zip([n for d,n in dv_pairs],range(first_label,N))) elif ordering == "decreasing degree": dv_pairs = [(d,n) for (n,d) in G.degree_iter()] dv_pairs.sort() # in-place sort from lowest to highest degree dv_pairs.reverse() mapping = dict(zip([n for d,n in dv_pairs],range(first_label,N))) else: raise nx.NetworkXError('Unknown node ordering: %s'%ordering) H = relabel_nodes(G,mapping) H.name="("+G.name+")_with_int_labels" # create node attribute with the old label if label_attribute is not None: nx.set_node_attributes(H, label_attribute, dict((v,k) for k,v in mapping.items())) return H networkx-1.8.1/networkx/linalg/0000775000175000017500000000000012177457361016426 5ustar aricaric00000000000000networkx-1.8.1/networkx/linalg/attrmatrix.py0000664000175000017500000003717212177456333021207 0ustar aricaric00000000000000""" Functions for constructing matrix-like objects from graph attributes. """ __all__ = ['attr_matrix', 'attr_sparse_matrix'] import networkx as nx def _node_value(G, node_attr): """Returns a function that returns a value from G.node[u]. We return a function expecting a node as its sole argument. Then, in the simplest scenario, the returned function will return G.node[u][node_attr]. However, we also handle the case when `node_attr` is None or when it is a function itself. Parameters ---------- G : graph A NetworkX graph node_attr : {None, str, callable} Specification of how the value of the node attribute should be obtained from the node attribute dictionary. Returns ------- value : function A function expecting a node as its sole argument. The function will returns a value from G.node[u] that depends on `edge_attr`. """ if node_attr is None: value = lambda u: u elif not hasattr(node_attr, '__call__'): # assume it is a key for the node attribute dictionary value = lambda u: G.node[u][node_attr] else: # Advanced: Allow users to specify something else. # # For example, # node_attr = lambda u: G.node[u].get('size', .5) * 3 # value = node_attr return value def _edge_value(G, edge_attr): """Returns a function that returns a value from G[u][v]. Suppose there exists an edge between u and v. Then we return a function expecting u and v as arguments. For Graph and DiGraph, G[u][v] is the edge attribute dictionary, and the function (essentially) returns G[u][v][edge_attr]. However, we also handle cases when `edge_attr` is None and when it is a function itself. For MultiGraph and MultiDiGraph, G[u][v] is a dictionary of all edges between u and v. In this case, the returned function sums the value of `edge_attr` for every edge between u and v. Parameters ---------- G : graph A NetworkX graph edge_attr : {None, str, callable} Specification of how the value of the edge attribute should be obtained from the edge attribute dictionary, G[u][v]. For multigraphs, G[u][v] is a dictionary of all the edges between u and v. This allows for special treatment of multiedges. Returns ------- value : function A function expecting two nodes as parameters. The nodes should represent the from- and to- node of an edge. The function will return a value from G[u][v] that depends on `edge_attr`. 
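    For example (an added sketch):

    >>> G = nx.Graph()
    >>> G.add_edge(0, 1, weight=2.5)
    >>> _edge_value(G, 'weight')(0, 1)
    2.5
    >>> _edge_value(G, None)(0, 1)   # no attribute: just count the edge
    1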
""" if edge_attr is None: # topological count of edges if G.is_multigraph(): value = lambda u,v: len(G[u][v]) else: value = lambda u,v: 1 elif not hasattr(edge_attr, '__call__'): # assume it is a key for the edge attribute dictionary if edge_attr == 'weight': # provide a default value if G.is_multigraph(): value = lambda u,v: sum([d.get(edge_attr, 1) for d in G[u][v].values()]) else: value = lambda u,v: G[u][v].get(edge_attr, 1) else: # otherwise, the edge attribute MUST exist for each edge if G.is_multigraph(): value = lambda u,v: sum([d[edge_attr] for d in G[u][v].values()]) else: value = lambda u,v: G[u][v][edge_attr] else: # Advanced: Allow users to specify something else. # # Alternative default value: # edge_attr = lambda u,v: G[u][v].get('thickness', .5) # # Function on an attribute: # edge_attr = lambda u,v: abs(G[u][v]['weight']) # # Handle Multi(Di)Graphs differently: # edge_attr = lambda u,v: numpy.prod([d['size'] for d in G[u][v].values()]) # # Ignore multiple edges # edge_attr = lambda u,v: 1 if len(G[u][v]) else 0 # value = edge_attr return value def attr_matrix(G, edge_attr=None, node_attr=None, normalized=False, rc_order=None, dtype=None, order=None): """Returns a NumPy matrix using attributes from G. If only `G` is passed in, then the adjacency matrix is constructed. Let A be a discrete set of values for the node attribute `node_attr`. Then the elements of A represent the rows and columns of the constructed matrix. Now, iterate through every edge e=(u,v) in `G` and consider the value of the edge attribute `edge_attr`. If ua and va are the values of the node attribute `node_attr` for u and v, respectively, then the value of the edge attribute is added to the matrix element at (ua, va). Parameters ---------- G : graph The NetworkX graph used to construct the NumPy matrix. edge_attr : str, optional Each element of the matrix represents a running total of the specified edge attribute for edges whose node attributes correspond to the rows/cols of the matirx. The attribute must be present for all edges in the graph. If no attribute is specified, then we just count the number of edges whose node attributes correspond to the matrix element. node_attr : str, optional Each row and column in the matrix represents a particular value of the node attribute. The attribute must be present for all nodes in the graph. Note, the values of this attribute should be reliably hashable. So, float values are not recommended. If no attribute is specified, then the rows and columns will be the nodes of the graph. normalized : bool, optional If True, then each row is normalized by the summation of its values. rc_order : list, optional A list of the node attribute values. This list specifies the ordering of rows and columns of the array. If no ordering is provided, then the ordering will be random (and also, a return value). Other Parameters ---------------- dtype : NumPy data-type, optional A valid NumPy dtype used to initialize the array. Keep in mind certain dtypes can yield unexpected results if the array is to be normalized. The parameter is passed to numpy.zeros(). If unspecified, the NumPy default is used. order : {'C', 'F'}, optional Whether to store multidimensional data in C- or Fortran-contiguous (row- or column-wise) order in memory. This parameter is passed to numpy.zeros(). If unspecified, the NumPy default is used. Returns ------- M : NumPy matrix The attribute matrix. ordering : list If `rc_order` was specified, then only the matrix is returned. 
However, if `rc_order` was None, then the ordering used to construct the matrix is returned as well. Examples -------- Construct an adjacency matrix: >>> G = nx.Graph() >>> G.add_edge(0,1,thickness=1,weight=3) >>> G.add_edge(0,2,thickness=2) >>> G.add_edge(1,2,thickness=3) >>> nx.attr_matrix(G, rc_order=[0,1,2]) matrix([[ 0., 1., 1.], [ 1., 0., 1.], [ 1., 1., 0.]]) Alternatively, we can obtain the matrix describing edge thickness. >>> nx.attr_matrix(G, edge_attr='thickness', rc_order=[0,1,2]) matrix([[ 0., 1., 2.], [ 1., 0., 3.], [ 2., 3., 0.]]) We can also color the nodes and ask for the probability distribution over all edges (u,v) describing: Pr(v has color Y | u has color X) >>> G.node[0]['color'] = 'red' >>> G.node[1]['color'] = 'red' >>> G.node[2]['color'] = 'blue' >>> rc = ['red', 'blue'] >>> nx.attr_matrix(G, node_attr='color', normalized=True, rc_order=rc) matrix([[ 0.33333333, 0.66666667], [ 1. , 0. ]]) For example, the above tells us that for all edges (u,v): Pr( v is red | u is red) = 1/3 Pr( v is blue | u is red) = 2/3 Pr( v is red | u is blue) = 1 Pr( v is blue | u is blue) = 0 Finally, we can obtain the total weights listed by the node colors. >>> nx.attr_matrix(G, edge_attr='weight', node_attr='color', rc_order=rc) matrix([[ 3., 2.], [ 2., 0.]]) Thus, the total weight over all edges (u,v) with u and v having colors: (red, red) is 3 # the sole contribution is from edge (0,1) (red, blue) is 2 # contributions from edges (0,2) and (1,2) (blue, red) is 2 # same as (red, blue) since graph is undirected (blue, blue) is 0 # there are no edges with blue endpoints """ try: import numpy as np except ImportError: raise ImportError( "attr_matrix() requires numpy: http://scipy.org/ ") edge_value = _edge_value(G, edge_attr) node_value = _node_value(G, node_attr) if rc_order is None: ordering = list(set([node_value(n) for n in G])) else: ordering = rc_order N = len(ordering) undirected = not G.is_directed() index = dict(zip(ordering, range(N))) M = np.zeros((N,N), dtype=dtype, order=order) seen = set([]) for u,nbrdict in G.adjacency_iter(): for v in nbrdict: # Obtain the node attribute values. i, j = index[node_value(u)], index[node_value(v)] if v not in seen: M[i,j] += edge_value(u,v) if undirected: M[j,i] = M[i,j] if undirected: seen.add(u) if normalized: M /= M.sum(axis=1).reshape((N,1)) M = np.asmatrix(M) if rc_order is None: return M, ordering else: return M def attr_sparse_matrix(G, edge_attr=None, node_attr=None, normalized=False, rc_order=None, dtype=None): """Returns a SciPy sparse matrix using attributes from G. If only `G` is passed in, then the adjacency matrix is constructed. Let A be a discrete set of values for the node attribute `node_attr`. Then the elements of A represent the rows and columns of the constructed matrix. Now, iterate through every edge e=(u,v) in `G` and consider the value of the edge attribute `edge_attr`. If ua and va are the values of the node attribute `node_attr` for u and v, respectively, then the value of the edge attribute is added to the matrix element at (ua, va). Parameters ---------- G : graph The NetworkX graph used to construct the NumPy matrix. edge_attr : str, optional Each element of the matrix represents a running total of the specified edge attribute for edges whose node attributes correspond to the rows/cols of the matirx. The attribute must be present for all edges in the graph. If no attribute is specified, then we just count the number of edges whose node attributes correspond to the matrix element. 
node_attr : str, optional Each row and column in the matrix represents a particular value of the node attribute. The attribute must be present for all nodes in the graph. Note, the values of this attribute should be reliably hashable. So, float values are not recommended. If no attribute is specified, then the rows and columns will be the nodes of the graph. normalized : bool, optional If True, then each row is normalized by the summation of its values. rc_order : list, optional A list of the node attribute values. This list specifies the ordering of rows and columns of the array. If no ordering is provided, then the ordering will be random (and also, a return value). Other Parameters ---------------- dtype : NumPy data-type, optional A valid NumPy dtype used to initialize the array. Keep in mind certain dtypes can yield unexpected results if the array is to be normalized. The parameter is passed to numpy.zeros(). If unspecified, the NumPy default is used. Returns ------- M : SciPy sparse matrix The attribute matrix. ordering : list If `rc_order` was specified, then only the matrix is returned. However, if `rc_order` was None, then the ordering used to construct the matrix is returned as well. Examples -------- Construct an adjacency matrix: >>> G = nx.Graph() >>> G.add_edge(0,1,thickness=1,weight=3) >>> G.add_edge(0,2,thickness=2) >>> G.add_edge(1,2,thickness=3) >>> M = nx.attr_sparse_matrix(G, rc_order=[0,1,2]) >>> M.todense() matrix([[ 0., 1., 1.], [ 1., 0., 1.], [ 1., 1., 0.]]) Alternatively, we can obtain the matrix describing edge thickness. >>> M = nx.attr_sparse_matrix(G, edge_attr='thickness', rc_order=[0,1,2]) >>> M.todense() matrix([[ 0., 1., 2.], [ 1., 0., 3.], [ 2., 3., 0.]]) We can also color the nodes and ask for the probability distribution over all edges (u,v) describing: Pr(v has color Y | u has color X) >>> G.node[0]['color'] = 'red' >>> G.node[1]['color'] = 'red' >>> G.node[2]['color'] = 'blue' >>> rc = ['red', 'blue'] >>> M = nx.attr_sparse_matrix(G, node_attr='color', \ normalized=True, rc_order=rc) >>> M.todense() matrix([[ 0.33333333, 0.66666667], [ 1. , 0. ]]) For example, the above tells us that for all edges (u,v): Pr( v is red | u is red) = 1/3 Pr( v is blue | u is red) = 2/3 Pr( v is red | u is blue) = 1 Pr( v is blue | u is blue) = 0 Finally, we can obtain the total weights listed by the node colors. >>> M = nx.attr_sparse_matrix(G, edge_attr='weight',\ node_attr='color', rc_order=rc) >>> M.todense() matrix([[ 3., 2.], [ 2., 0.]]) Thus, the total weight over all edges (u,v) with u and v having colors: (red, red) is 3 # the sole contribution is from edge (0,1) (red, blue) is 2 # contributions from edges (0,2) and (1,2) (blue, red) is 2 # same as (red, blue) since graph is undirected (blue, blue) is 0 # there are no edges with blue endpoints """ try: import numpy as np from scipy import sparse except ImportError: raise ImportError( "attr_sparse_matrix() requires scipy: http://scipy.org/ ") edge_value = _edge_value(G, edge_attr) node_value = _node_value(G, node_attr) if rc_order is None: ordering = list(set([node_value(n) for n in G])) else: ordering = rc_order N = len(ordering) undirected = not G.is_directed() index = dict(zip(ordering, range(N))) M = sparse.lil_matrix((N,N), dtype=dtype) seen = set([]) for u,nbrdict in G.adjacency_iter(): for v in nbrdict: # Obtain the node attribute values. 
i, j = index[node_value(u)], index[node_value(v)] if v not in seen: M[i,j] += edge_value(u,v) if undirected: M[j,i] = M[i,j] if undirected: seen.add(u) if normalized: norms = np.asarray(M.sum(axis=1)).ravel() for i,norm in enumerate(norms): M[i,:] /= norm if rc_order is None: return M, ordering else: return M # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/networkx/linalg/__init__.py0000664000175000017500000000046512177456333020542 0ustar aricaric00000000000000from networkx.linalg.attrmatrix import * import networkx.linalg.attrmatrix from networkx.linalg.spectrum import * import networkx.linalg.spectrum from networkx.linalg.graphmatrix import * import networkx.linalg.graphmatrix from networkx.linalg.laplacianmatrix import * import networkx.linalg.laplacianmatrix networkx-1.8.1/networkx/linalg/spectrum.py0000664000175000017500000000433712177456333020647 0ustar aricaric00000000000000""" Eigenvalue spectrum of graphs. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) __all__ = ['laplacian_spectrum', 'adjacency_spectrum'] def laplacian_spectrum(G, weight='weight'): """Return eigenvalues of the Laplacian of G Parameters ---------- G : graph A NetworkX graph weight : string or None, optional (default='weight') The edge data key used to compute each value in the matrix. If None, then each edge has weight 1. Returns ------- evals : NumPy array Eigenvalues Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. See to_numpy_matrix for other options. See Also -------- laplacian_matrix """ try: import numpy as np except ImportError: raise ImportError( "laplacian_spectrum() requires NumPy: http://scipy.org/ ") return np.linalg.eigvals(nx.laplacian_matrix(G,weight=weight)) def adjacency_spectrum(G, weight='weight'): """Return eigenvalues of the adjacency matrix of G. Parameters ---------- G : graph A NetworkX graph weight : string or None, optional (default='weight') The edge data key used to compute each value in the matrix. If None, then each edge has weight 1. Returns ------- evals : NumPy array Eigenvalues Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. See to_numpy_matrix for other options. See Also -------- adjacency_matrix """ try: import numpy as np except ImportError: raise ImportError( "adjacency_spectrum() requires NumPy: http://scipy.org/ ") return np.linalg.eigvals(nx.adjacency_matrix(G,weight=weight)) # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/linalg/laplacianmatrix.py0000664000175000017500000002022112177456333022144 0ustar aricaric00000000000000""" Laplacian matrix of graphs. """ # Copyright (C) 2004-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
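# (Added orientation note, a sketch.) For the path graph P3 the Laplacian
# built below is
#     [[ 1, -1,  0],
#      [-1,  2, -1],
#      [ 0, -1,  1]]
# and its eigenvalues -- what linalg.spectrum.laplacian_spectrum returns --
# are 0, 1 and 3.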
import networkx as nx from networkx.utils import require, not_implemented_for __author__ = "\n".join(['Aric Hagberg ', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult (dschult@colgate.edu)', 'Alejandro Weinstein ']) __all__ = ['laplacian_matrix', 'normalized_laplacian_matrix', 'directed_laplacian_matrix'] @require('numpy') @not_implemented_for('directed') def laplacian_matrix(G, nodelist=None, weight='weight'): """Return the Laplacian matrix of G. The graph Laplacian is the matrix L = D - A, where A is the adjacency matrix and D is the diagonal matrix of node degrees. Parameters ---------- G : graph A NetworkX graph nodelist : list, optional The rows and columns are ordered according to the nodes in nodelist. If nodelist is None, then the ordering is produced by G.nodes(). weight : string or None, optional (default='weight') The edge data key used to compute each value in the matrix. If None, then each edge has weight 1. Returns ------- L : NumPy matrix The Laplacian matrix of G. Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. See to_numpy_matrix for other options. See Also -------- to_numpy_matrix normalized_laplacian_matrix """ import numpy as np if nodelist is None: nodelist = G.nodes() if G.is_multigraph(): # this isn't the fastest way to do this... A = np.asarray(nx.to_numpy_matrix(G,nodelist=nodelist,weight=weight)) I = np.identity(A.shape[0]) D = I*np.sum(A,axis=1) L = D - A else: # Graph or DiGraph, this is faster than above n = len(nodelist) index = dict( (n,i) for i,n in enumerate(nodelist) ) L = np.zeros((n,n)) for ui,u in enumerate(nodelist): totalwt = 0.0 for v,d in G[u].items(): try: vi = index[v] except KeyError: continue wt = d.get(weight,1) L[ui,vi] = -wt totalwt += wt L[ui,ui] = totalwt return np.asmatrix(L) @require('numpy') @not_implemented_for('directed') def normalized_laplacian_matrix(G, nodelist=None, weight='weight'): r"""Return the normalized Laplacian matrix of G. The normalized graph Laplacian is the matrix .. math:: NL = D^{-1/2} L D^{-1/2} where `L` is the graph Laplacian and `D` is the diagonal matrix of node degrees. Parameters ---------- G : graph A NetworkX graph nodelist : list, optional The rows and columns are ordered according to the nodes in nodelist. If nodelist is None, then the ordering is produced by G.nodes(). weight : string or None, optional (default='weight') The edge data key used to compute each value in the matrix. If None, then each edge has weight 1. Returns ------- L : NumPy matrix The normalized Laplacian matrix of G. Notes ----- For MultiGraph/MultiDiGraph, the edges weights are summed. See to_numpy_matrix for other options. If the Graph contains selfloops, D is defined as diag(sum(A,1)), where A is the adjencency matrix [2]_. See Also -------- laplacian_matrix References ---------- .. [1] Fan Chung-Graham, Spectral Graph Theory, CBMS Regional Conference Series in Mathematics, Number 92, 1997. .. [2] Steve Butler, Interlacing For Weighted Graphs Using The Normalized Laplacian, Electronic Journal of Linear Algebra, Volume 16, pp. 90-98, March 2007. """ import numpy as np if G.is_multigraph(): L = laplacian_matrix(G, nodelist=nodelist, weight=weight) D = np.diag(L) elif G.number_of_selfloops() == 0: L = laplacian_matrix(G, nodelist=nodelist, weight=weight) D = np.diag(L) else: A = np.array(nx.adj_matrix(G)) D = np.sum(A, 1) L = np.diag(D) - A # Handle div by 0. 
It happens if there are unconnected nodes with np.errstate(divide='ignore'): Disqrt = np.diag(1 / np.sqrt(D)) Disqrt[np.isinf(Disqrt)] = 0 Ln = np.dot(Disqrt, np.dot(L,Disqrt)) return Ln ############################################################################### # Code based on # https://bitbucket.org/bedwards/networkx-community/src/370bd69fc02f/networkx/algorithms/community/ @require('numpy') @not_implemented_for('undirected') @not_implemented_for('multigraph') def directed_laplacian_matrix(G, nodelist=None, weight='weight', walk_type=None, alpha=0.95): r"""Return the directed Laplacian matrix of G. The graph directed Laplacian is the matrix .. math:: L = I - (\Phi^{1/2} P \Phi^{-1/2} + \Phi^{-1/2} P^T \Phi^{1/2} ) / 2 where `I` is the identity matrix, `P` is the transition matrix of the graph, and `\Phi` a matrix with the Perron vector of `P` in the diagonal and zeros elsewhere. Depending on the value of walk_type, `P` can be the transition matrix induced by a random walk, a lazy random walk, or a random walk with teleportation (PageRank). Parameters ---------- G : DiGraph A NetworkX graph nodelist : list, optional The rows and columns are ordered according to the nodes in nodelist. If nodelist is None, then the ordering is produced by G.nodes(). weight : string or None, optional (default='weight') The edge data key used to compute each value in the matrix. If None, then each edge has weight 1. walk_type : string or None, optional (default=None) If None, `P` is selected depending on the properties of the graph. Otherwise is one of 'random', 'lazy', or 'pagerank' alpha : real (1 - alpha) is the teleportation probability used with pagerank Returns ------- L : NumPy array Normalized Laplacian of G. Raises ------ NetworkXError If NumPy cannot be imported NetworkXNotImplemnted If G is not a DiGraph Notes ----- Only implemented for DiGraphs See Also -------- laplacian_matrix References ---------- .. [1] Fan Chung (2005). Laplacians and the Cheeger inequality for directed graphs. 
Annals of Combinatorics, 9(1), 2005 """ import numpy as np if walk_type is None: if nx.is_strongly_connected(G): if nx.is_aperiodic(G): walk_type = "random" else: walk_type = "lazy" else: walk_type = "pagerank" M = nx.to_numpy_matrix(G, nodelist=nodelist, weight=weight) n, m = M.shape if walk_type in ["random", "lazy"]: DI = np.diagflat(1.0 / np.sum(M, axis=1)) if walk_type == "random": P = DI * M else: I = np.identity(n) P = (I + DI * M) / 2.0 elif walk_type == "pagerank": if not (0 < alpha < 1): raise nx.NetworkXError('alpha must be between 0 and 1') # add constant to dangling nodes' row dangling = np.where(M.sum(axis=1) == 0) for d in dangling[0]: M[d] = 1.0 / n # normalize M = M / M.sum(axis=1) P = alpha * M + (1 - alpha) / n else: raise nx.NetworkXError("walk_type must be random, lazy, or pagerank") evals, evecs = np.linalg.eig(P.T) index = evals.argsort()[-1] # index of largest eval,evec # eigenvector of largest eigenvalue at ind[-1] v = np.array(evecs[:,index]).flatten().real p = v / v.sum() sp = np.sqrt(p) Q = np.diag(sp) * P * np.diag(1.0/sp) I = np.identity(len(G)) return I - (Q + Q.T) /2.0 # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/linalg/tests/0000775000175000017500000000000012177457361017570 5ustar aricaric00000000000000networkx-1.8.1/networkx/linalg/tests/test_graphmatrix.py0000664000175000017500000000772012177456333023533 0ustar aricaric00000000000000from nose import SkipTest import networkx as nx from networkx.generators.degree_seq import havel_hakimi_graph class TestGraphMatrix(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global numpy global assert_equal global assert_almost_equal try: import numpy from numpy.testing import assert_equal,assert_almost_equal except ImportError: raise SkipTest('NumPy not available.') def setUp(self): deg=[3,2,2,1,0] self.G=havel_hakimi_graph(deg) self.OI=numpy.array([[-1, -1, -1, 0], [1, 0, 0, -1], [0, 1, 0, 1], [0, 0, 1, 0], [0, 0, 0, 0]]) self.A=numpy.array([[0, 1, 1, 1, 0], [1, 0, 1, 0, 0], [1, 1, 0, 0, 0], [1, 0, 0, 0, 0], [0, 0, 0, 0, 0]]) self.WG=nx.Graph( (u,v,{'weight':0.5,'other':0.3}) for (u,v) in self.G.edges_iter() ) self.WG.add_node(4) self.WA=numpy.array([[0 , 0.5, 0.5, 0.5, 0], [0.5, 0 , 0.5, 0 , 0], [0.5, 0.5, 0 , 0 , 0], [0.5, 0 , 0 , 0 , 0], [0 , 0 , 0 , 0 , 0]]) self.MG=nx.MultiGraph(self.G) self.MG2=self.MG.copy() self.MG2.add_edge(0,1) self.MG2A=numpy.array([[0, 2, 1, 1, 0], [2, 0, 1, 0, 0], [1, 1, 0, 0, 0], [1, 0, 0, 0, 0], [0, 0, 0, 0, 0]]) self.MGOI=numpy.array([[-1, -1, -1, -1, 0], [1, 1, 0, 0, -1], [0, 0, 1, 0, 1], [0, 0, 0, 1, 0], [0, 0, 0, 0, 0]]) def test_incidence_matrix(self): "Conversion to incidence matrix" assert_equal(nx.incidence_matrix(self.G,oriented=True),self.OI) assert_equal(nx.incidence_matrix(self.G),numpy.abs(self.OI)) assert_equal(nx.incidence_matrix(self.MG,oriented=True),self.OI) assert_equal(nx.incidence_matrix(self.MG),numpy.abs(self.OI)) assert_equal(nx.incidence_matrix(self.MG2,oriented=True),self.MGOI) assert_equal(nx.incidence_matrix(self.MG2),numpy.abs(self.MGOI)) assert_equal(nx.incidence_matrix(self.WG,oriented=True),self.OI) assert_equal(nx.incidence_matrix(self.WG),numpy.abs(self.OI)) assert_equal(nx.incidence_matrix(self.WG,oriented=True, weight='weight'),0.5*self.OI) assert_equal(nx.incidence_matrix(self.WG,weight='weight'), numpy.abs(0.5*self.OI)) 
assert_equal(nx.incidence_matrix(self.WG,oriented=True,weight='other'), 0.3*self.OI) WMG=nx.MultiGraph(self.WG) WMG.add_edge(0,1,attr_dict={'weight':0.5,'other':0.3}) assert_equal(nx.incidence_matrix(WMG,weight='weight'), numpy.abs(0.5*self.MGOI)) assert_equal(nx.incidence_matrix(WMG,weight='weight',oriented=True), 0.5*self.MGOI) assert_equal(nx.incidence_matrix(WMG,weight='other',oriented=True), 0.3*self.MGOI) def test_adjacency_matrix(self): "Conversion to adjacency matrix" assert_equal(nx.adj_matrix(self.G),self.A) assert_equal(nx.adj_matrix(self.MG),self.A) assert_equal(nx.adj_matrix(self.MG2),self.MG2A) assert_equal(nx.adj_matrix(self.G,nodelist=[0,1]),self.A[:2,:2]) assert_equal(nx.adj_matrix(self.WG),self.WA) assert_equal(nx.adj_matrix(self.WG,weight=None),self.A) assert_equal(nx.adj_matrix(self.MG2,weight=None),self.MG2A) assert_equal(nx.adj_matrix(self.WG,weight='other'),0.6*self.WA) networkx-1.8.1/networkx/linalg/tests/test_spectrum.py0000664000175000017500000000273412177456333023047 0ustar aricaric00000000000000from nose import SkipTest import networkx as nx from networkx.generators.degree_seq import havel_hakimi_graph class TestSpectrum(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global numpy global assert_equal global assert_almost_equal try: import numpy from numpy.testing import assert_equal,assert_almost_equal except ImportError: raise SkipTest('NumPy not available.') def setUp(self): deg=[3,2,2,1,0] self.G=havel_hakimi_graph(deg) self.P=nx.path_graph(3) self.WG=nx.Graph( (u,v,{'weight':0.5,'other':0.3}) for (u,v) in self.G.edges_iter() ) self.WG.add_node(4) def test_laplacian_spectrum(self): "Laplacian eigenvalues" evals=numpy.array([0, 0, 1, 3, 4]) e=sorted(nx.laplacian_spectrum(self.G)) assert_almost_equal(e,evals) e=sorted(nx.laplacian_spectrum(self.WG,weight=None)) assert_almost_equal(e,evals) e=sorted(nx.laplacian_spectrum(self.WG)) assert_almost_equal(e,0.5*evals) e=sorted(nx.laplacian_spectrum(self.WG,weight='other')) assert_almost_equal(e,0.3*evals) def test_adjacency_spectrum(self): "Adjacency eigenvalues" evals=numpy.array([-numpy.sqrt(2), 0, numpy.sqrt(2)]) e=sorted(nx.adjacency_spectrum(self.P)) assert_almost_equal(e,evals) networkx-1.8.1/networkx/linalg/tests/test_laplacian.py0000664000175000017500000001205112177456333023122 0ustar aricaric00000000000000from nose import SkipTest import networkx as nx from networkx.generators.degree_seq import havel_hakimi_graph class TestLaplacian(object): numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test @classmethod def setupClass(cls): global numpy global assert_equal global assert_almost_equal try: import numpy from numpy.testing import assert_equal,assert_almost_equal except ImportError: raise SkipTest('NumPy not available.') def setUp(self): deg=[3,2,2,1,0] self.G=havel_hakimi_graph(deg) self.WG=nx.Graph( (u,v,{'weight':0.5,'other':0.3}) for (u,v) in self.G.edges_iter() ) self.WG.add_node(4) self.MG=nx.MultiGraph(self.G) # Graph with selfloops self.Gsl = self.G.copy() for node in self.Gsl.nodes(): self.Gsl.add_edge(node, node) def test_laplacian(self): "Graph Laplacian" NL=numpy.array([[ 3, -1, -1, -1, 0], [-1, 2, -1, 0, 0], [-1, -1, 2, 0, 0], [-1, 0, 0, 1, 0], [ 0, 0, 0, 0, 0]]) WL=0.5*NL OL=0.3*NL assert_equal(nx.laplacian_matrix(self.G),NL) assert_equal(nx.laplacian_matrix(self.MG),NL) assert_equal(nx.laplacian_matrix(self.G,nodelist=[0,1]), numpy.array([[ 1, -1],[-1, 1]])) assert_equal(nx.laplacian_matrix(self.WG),WL) 
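        # weight=None ignores the stored edge data (unweighted Laplacian NL),
        # while weight='other' picks up the 0.3 attribute instead: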
assert_equal(nx.laplacian_matrix(self.WG,weight=None),NL) assert_equal(nx.laplacian_matrix(self.WG,weight='other'),OL) def test_normalized_laplacian(self): "Generalized Graph Laplacian" GL=numpy.array([[ 1.00, -0.408, -0.408, -0.577, 0.00], [-0.408, 1.00, -0.50, 0.00 , 0.00], [-0.408, -0.50, 1.00, 0.00, 0.00], [-0.577, 0.00, 0.00, 1.00, 0.00], [ 0.00, 0.00, 0.00, 0.00, 0.00]]) Lsl = numpy.array([[ 0.75 , -0.2887, -0.2887, -0.3536, 0.], [-0.2887, 0.6667, -0.3333, 0. , 0.], [-0.2887, -0.3333, 0.6667, 0. , 0.], [-0.3536, 0. , 0. , 0.5 , 0.], [ 0. , 0. , 0. , 0. , 0.]]) assert_almost_equal(nx.normalized_laplacian_matrix(self.G),GL,decimal=3) assert_almost_equal(nx.normalized_laplacian_matrix(self.MG),GL,decimal=3) assert_almost_equal(nx.normalized_laplacian_matrix(self.WG),GL,decimal=3) assert_almost_equal(nx.normalized_laplacian_matrix(self.WG,weight='other'),GL,decimal=3) assert_almost_equal(nx.normalized_laplacian_matrix(self.Gsl), Lsl, decimal=3) def test_directed_laplacian(self): "Directed Laplacian" # Graph used as an example in Sec. 4.1 of Langville and Meyer, # "Google's PageRank and Beyond". The graph contains dangling nodes, so # the pagerank random walk is selected by directed_laplacian G = nx.DiGraph() G.add_edges_from(((1,2), (1,3), (3,1), (3,2), (3,5), (4,5), (4,6), (5,4), (5,6), (6,4))) GL = numpy.array([[ 0.9833, -0.2941, -0.3882, -0.0291, -0.0231, -0.0261], [-0.2941, 0.8333, -0.2339, -0.0536, -0.0589, -0.0554], [-0.3882, -0.2339, 0.9833, -0.0278, -0.0896, -0.0251], [-0.0291, -0.0536, -0.0278, 0.9833, -0.4878, -0.6675], [-0.0231, -0.0589, -0.0896, -0.4878, 0.9833, -0.2078], [-0.0261, -0.0554, -0.0251, -0.6675, -0.2078, 0.9833]]) assert_almost_equal(nx.directed_laplacian_matrix(G, alpha=0.9), GL, decimal=3) # Make the graph strongly connected, so we can use a random and lazy walk G.add_edges_from((((2,5), (6,1)))) GL = numpy.array([[ 1. , -0.3062, -0.4714, 0. , 0. , -0.3227], [-0.3062, 1. , -0.1443, 0. , -0.3162, 0. ], [-0.4714, -0.1443, 1. , 0. , -0.0913, 0. ], [ 0. , 0. , 0. , 1. , -0.5 , -0.5 ], [ 0. , -0.3162, -0.0913, -0.5 , 1. , -0.25 ], [-0.3227, 0. , 0. , -0.5 , -0.25 , 1. ]]) assert_almost_equal(nx.directed_laplacian_matrix(G, walk_type='random'), GL, decimal=3) GL = numpy.array([[ 0.5 , -0.1531, -0.2357, 0. , 0. , -0.1614], [-0.1531, 0.5 , -0.0722, 0. , -0.1581, 0. ], [-0.2357, -0.0722, 0.5 , 0. , -0.0456, 0. ], [ 0. , 0. , 0. , 0.5 , -0.25 , -0.25 ], [ 0. , -0.1581, -0.0456, -0.25 , 0.5 , -0.125 ], [-0.1614, 0. , 0. , -0.25 , -0.125 , 0.5 ]]) assert_almost_equal(nx.directed_laplacian_matrix(G, walk_type='lazy'), GL, decimal=3) networkx-1.8.1/networkx/linalg/graphmatrix.py0000664000175000017500000001162012177456333021324 0ustar aricaric00000000000000""" Adjacency matrix and incidence matrix of graphs. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) __all__ = ['incidence_matrix', 'adj_matrix', 'adjacency_matrix', ] def incidence_matrix(G, nodelist=None, edgelist=None, oriented=False, weight=None): """Return incidence matrix of G. The incidence matrix assigns each row to a node and each column to an edge. For a standard incidence matrix a 1 appears wherever a row's node is incident on the column's edge. For an oriented incidence matrix each edge is assigned an orientation (arbitrarily for undirected and aligning to direction for directed). 
A -1 appears for the tail of an edge and 1 for the head of the edge. The elements are zero otherwise. Parameters ---------- G : graph A NetworkX graph nodelist : list, optional (default= all nodes in G) The rows are ordered according to the nodes in nodelist. If nodelist is None, then the ordering is produced by G.nodes(). edgelist : list, optional (default= all edges in G) The columns are ordered according to the edges in edgelist. If edgelist is None, then the ordering is produced by G.edges(). oriented: bool, optional (default=False) If True, matrix elements are +1 or -1 for the head or tail node respectively of each edge. If False, +1 occurs at both nodes. weight : string or None, optional (default=None) The edge data key used to provide each value in the matrix. If None, then each edge has weight 1. Edge weights, if used, should be positive so that the orientation can provide the sign. Returns ------- A : NumPy matrix The incidence matrix of G. Notes ----- For MultiGraph/MultiDiGraph, the edges in edgelist should be (u,v,key) 3-tuples. "Networks are the best discrete model for so many problems in applied mathematics" [1]_. References ---------- .. [1] Gil Strang, Network applications: A = incidence matrix, http://academicearth.org/lectures/network-applications-incidence-matrix """ try: import numpy as np except ImportError: raise ImportError( "incidence_matrix() requires numpy: http://scipy.org/ ") if nodelist is None: nodelist = G.nodes() if edgelist is None: if G.is_multigraph(): edgelist = G.edges(keys=True) else: edgelist = G.edges() A = np.zeros((len(nodelist),len(edgelist))) node_index = dict( (node,i) for i,node in enumerate(nodelist) ) for ei,e in enumerate(edgelist): (u,v) = e[:2] if u == v: continue # self loops give zero column try: ui = node_index[u] vi = node_index[v] except KeyError: raise NetworkXError('node %s or %s in edgelist ' 'but not in nodelist"%(u,v)') if weight is None: wt = 1 else: if G.is_multigraph(): ekey = e[2] wt = G[u][v][ekey].get(weight,1) else: wt = G[u][v].get(weight,1) if oriented: A[ui,ei] = -wt A[vi,ei] = wt else: A[ui,ei] = wt A[vi,ei] = wt return np.asmatrix(A) def adjacency_matrix(G, nodelist=None, weight='weight'): """Return adjacency matrix of G. Parameters ---------- G : graph A NetworkX graph nodelist : list, optional The rows and columns are ordered according to the nodes in nodelist. If nodelist is None, then the ordering is produced by G.nodes(). weight : string or None, optional (default='weight') The edge data key used to provide each value in the matrix. If None, then each edge has weight 1. Returns ------- A : numpy matrix Adjacency matrix representation of G. Notes ----- If you want a pure Python adjacency matrix representation try networkx.convert.to_dict_of_dicts which will return a dictionary-of-dictionaries format that can be addressed as a sparse matrix. For MultiGraph/MultiDiGraph, the edges weights are summed. See to_numpy_matrix for other options. 
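    A minimal added example (sketch):

    >>> G = nx.path_graph(3)
    >>> A = nx.adjacency_matrix(G)   # 3x3 numpy matrix with A[0, 1] == 1.0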
See Also -------- to_numpy_matrix to_dict_of_dicts """ return nx.to_numpy_matrix(G,nodelist=nodelist,weight=weight) adj_matrix=adjacency_matrix # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except ImportError: raise SkipTest("NumPy not available") networkx-1.8.1/networkx/generators/0000775000175000017500000000000012177457361017331 5ustar aricaric00000000000000networkx-1.8.1/networkx/generators/stochastic.py0000664000175000017500000000270712177456333022053 0ustar aricaric00000000000000"""Stochastic graph.""" # Copyright (C) 2010-2013 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx __author__ = "Aric Hagberg " __all__ = ['stochastic_graph'] def stochastic_graph(G, copy=True, weight='weight'): """Return a right-stochastic representation of G. A right-stochastic graph is a weighted digraph in which, for each node, the weights of the outgoing edges sum to 1. Parameters ---------- G : graph A NetworkX graph copy : boolean, optional If True, make a copy of the graph; otherwise modify the original graph weight : edge attribute key (optional, default='weight') Edge data key used for weight. If no attribute is found for an edge the edge weight is set to 1. """ if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: raise nx.NetworkXError('stochastic_graph not implemented ' 'for multigraphs') if not G.is_directed(): raise nx.NetworkXError('stochastic_graph not implemented ' 'for undirected graphs') if copy: W = nx.DiGraph(G) else: W = G # reference original graph, no copy degree = W.out_degree(weight=weight) for (u,v,d) in W.edges(data=True): d[weight] = float(d.get(weight,1.0))/degree[u] return W networkx-1.8.1/networkx/generators/threshold.py0000664000175000017500000007033212177456333021702 0ustar aricaric00000000000000""" Threshold Graphs - Creation, manipulation and identification. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult (dschult@colgate.edu)""" # Copyright (C) 2004-2008 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # __all__=[] import random # for swap_d from math import sqrt import networkx def is_threshold_graph(G): """ Returns True if G is a threshold graph. """ return is_threshold_sequence(list(G.degree().values())) def is_threshold_sequence(degree_sequence): """ Returns True if the sequence is a threshold degree sequence. Uses the property that a threshold graph must be constructed by adding either dominating or isolated nodes. Thus, it can be deconstructed iteratively by removing a node of degree zero or a node that connects to the remaining nodes. If this deconstruction fails then the sequence is not a threshold sequence. """ ds=degree_sequence[:] # get a copy so we don't destroy original ds.sort() while ds: if ds[0]==0: # if isolated node ds.pop(0) # remove it continue if ds[-1]!=len(ds)-1: # is the largest degree node dominating? return False # no, not a threshold degree sequence ds.pop() # yes, largest is the dominating node ds=[ d-1 for d in ds ] # remove it and decrement all degrees return True def creation_sequence(degree_sequence,with_labels=False,compact=False): """ Determines the creation sequence for the given threshold degree sequence. The creation sequence is a list of single characters 'd' or 'i': 'd' for dominating or 'i' for isolated vertices. Dominating vertices are connected to all vertices present when they are added. The first node added is by convention 'd'.
This list can be converted to a string if desired using "".join(cs) If with_labels==True: Returns a list of 2-tuples containing the vertex number and a character 'd' or 'i' which describes the type of vertex. If compact==True: Returns the creation sequence in a compact form that is the number of 'i's and 'd's alternating. Examples: [1,2,2,3] represents d,i,i,d,d,i,i,i [3,1,2] represents d,d,d,i,d,d Notice that the first number is the first vertex to be used for construction and so is always 'd'. with_labels and compact cannot both be True. Returns None if the sequence is not a threshold sequence """ if with_labels and compact: raise ValueError("compact sequences cannot be labeled") # make an indexed copy if isinstance(degree_sequence,dict): # labeled degree seqeunce ds = [ [degree,label] for (label,degree) in degree_sequence.items() ] else: ds=[ [d,i] for i,d in enumerate(degree_sequence) ] ds.sort() cs=[] # creation sequence while ds: if ds[0][0]==0: # isolated node (d,v)=ds.pop(0) if len(ds)>0: # make sure we start with a d cs.insert(0,(v,'i')) else: cs.insert(0,(v,'d')) continue if ds[-1][0]!=len(ds)-1: # Not dominating node return None # not a threshold degree sequence (d,v)=ds.pop() cs.insert(0,(v,'d')) ds=[ [d[0]-1,d[1]] for d in ds ] # decrement due to removing node if with_labels: return cs if compact: return make_compact(cs) return [ v[1] for v in cs ] # not labeled def make_compact(creation_sequence): """ Returns the creation sequence in a compact form that is the number of 'i's and 'd's alternating. Examples: [1,2,2,3] represents d,i,i,d,d,i,i,i. [3,1,2] represents d,d,d,i,d,d. Notice that the first number is the first vertex to be used for construction and so is always 'd'. Labeled creation sequences lose their labels in the compact representation. """ first=creation_sequence[0] if isinstance(first,str): # creation sequence cs = creation_sequence[:] elif isinstance(first,tuple): # labeled creation sequence cs = [ s[1] for s in creation_sequence ] elif isinstance(first,int): # compact creation sequence return creation_sequence else: raise TypeError("Not a valid creation sequence type") ccs=[] count=1 # count the run lengths of d's or i's. for i in range(1,len(cs)): if cs[i]==cs[i-1]: count+=1 else: ccs.append(count) count=1 ccs.append(count) # don't forget the last one return ccs def uncompact(creation_sequence): """ Converts a compact creation sequence for a threshold graph to a standard creation sequence (unlabeled). If the creation_sequence is already standard, return it. See creation_sequence. """ first=creation_sequence[0] if isinstance(first,str): # creation sequence return creation_sequence elif isinstance(first,tuple): # labeled creation sequence return creation_sequence elif isinstance(first,int): # compact creation sequence ccscopy=creation_sequence[:] else: raise TypeError("Not a valid creation sequence type") cs = [] while ccscopy: cs.extend(ccscopy.pop(0)*['d']) if ccscopy: cs.extend(ccscopy.pop(0)*['i']) return cs def creation_sequence_to_weights(creation_sequence): """ Returns a list of node weights which create the threshold graph designated by the creation sequence. The weights are scaled so that the threshold is 1.0. The order of the nodes is the same as that in the creation sequence. 
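For example (an illustrative sketch; the values follow from the scaling
rule above, so that the implied threshold is 1.0):

>>> creation_sequence_to_weights('ddid')
[0.5, 0.5, 0.25, 0.75]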
""" # Turn input sequence into a labeled creation sequence first=creation_sequence[0] if isinstance(first,str): # creation sequence if isinstance(creation_sequence,list): wseq = creation_sequence[:] else: wseq = list(creation_sequence) # string like 'ddidid' elif isinstance(first,tuple): # labeled creation sequence wseq = [ v[1] for v in creation_sequence] elif isinstance(first,int): # compact creation sequence wseq = uncompact(creation_sequence) else: raise TypeError("Not a valid creation sequence type") # pass through twice--first backwards wseq.reverse() w=0 prev='i' for j,s in enumerate(wseq): if s=='i': wseq[j]=w prev=s elif prev=='i': prev=s w+=1 wseq.reverse() # now pass through forwards for j,s in enumerate(wseq): if s=='d': wseq[j]=w prev=s elif prev=='d': prev=s w+=1 # Now scale weights if prev=='d': w+=1 wscale=1./float(w) return [ ww*wscale for ww in wseq] #return wseq def weights_to_creation_sequence(weights,threshold=1,with_labels=False,compact=False): """ Returns a creation sequence for a threshold graph determined by the weights and threshold given as input. If the sum of two node weights is greater than the threshold value, an edge is created between these nodes. The creation sequence is a list of single characters 'd' or 'i': 'd' for dominating or 'i' for isolated vertices. Dominating vertices are connected to all vertices present when it is added. The first node added is by convention 'd'. If with_labels==True: Returns a list of 2-tuples containing the vertex number and a character 'd' or 'i' which describes the type of vertex. If compact==True: Returns the creation sequence in a compact form that is the number of 'i's and 'd's alternating. Examples: [1,2,2,3] represents d,i,i,d,d,i,i,i [3,1,2] represents d,d,d,i,d,d Notice that the first number is the first vertex to be used for construction and so is always 'd'. with_labels and compact cannot both be True. """ if with_labels and compact: raise ValueError("compact sequences cannot be labeled") # make an indexed copy if isinstance(weights,dict): # labeled weights wseq = [ [w,label] for (label,w) in weights.items() ] else: wseq = [ [w,i] for i,w in enumerate(weights) ] wseq.sort() cs=[] # creation sequence cutoff=threshold-wseq[-1][0] while wseq: if wseq[0][0]0: # get new degree sequence on subgraph dsdict=H.degree() ds=[ [d,v] for v,d in dsdict.items() ] ds.sort() # Update threshold graph nodes if ds[-1][0]==0: # all are isolated cs.extend( zip( dsdict, ['i']*(len(ds)-1)+['d']) ) break # Done! # pull off isolated nodes while ds[0][0]==0: (d,iso)=ds.pop(0) cs.append((iso,'i')) # find new biggest node (d,bigv)=ds.pop() # add edges of star to t_g cs.append((bigv,'d')) # form subgraph of neighbors of big node H=H.subgraph(H.neighbors(bigv)) cs.reverse() return cs ### Properties of Threshold Graphs def triangles(creation_sequence): """ Compute number of triangles in the threshold graph with the given creation sequence. """ # shortcut algoritm that doesn't require computing number # of triangles at each node. cs=creation_sequence # alias dr=cs.count("d") # number of d's in sequence ntri=dr*(dr-1)*(dr-2)/6 # number of triangles in clique of nd d's # now add dr choose 2 triangles for every 'i' in sequence where # dr is the number of d's to the right of the current i for i,typ in enumerate(cs): if typ=="i": ntri+=dr*(dr-1)/2 else: dr-=1 return ntri def triangle_sequence(creation_sequence): """ Return triangle sequence for the given threshold graph creation sequence. 
""" cs=creation_sequence seq=[] dr=cs.count("d") # number of d's to the right of the current pos dcur=(dr-1)*(dr-2) // 2 # number of triangles through a node of clique dr irun=0 # number of i's in the last run drun=0 # number of d's in the last run for i,sym in enumerate(cs): if sym=="d": drun+=1 tri=dcur+(dr-1)*irun # new triangles at this d else: # cs[i]="i": if prevsym=="d": # new string of i's dcur+=(dr-1)*irun # accumulate shared shortest paths irun=0 # reset i run counter dr-=drun # reduce number of d's to right drun=0 # reset d run counter irun+=1 tri=dr*(dr-1) // 2 # new triangles at this i seq.append(tri) prevsym=sym return seq def cluster_sequence(creation_sequence): """ Return cluster sequence for the given threshold graph creation sequence. """ triseq=triangle_sequence(creation_sequence) degseq=degree_sequence(creation_sequence) cseq=[] for i,deg in enumerate(degseq): tri=triseq[i] if deg <= 1: # isolated vertex or single pair gets cc 0 cseq.append(0) continue max_size=(deg*(deg-1)) // 2 cseq.append(float(tri)/float(max_size)) return cseq def degree_sequence(creation_sequence): """ Return degree sequence for the threshold graph with the given creation sequence """ cs=creation_sequence # alias seq=[] rd=cs.count("d") # number of d to the right for i,sym in enumerate(cs): if sym=="d": rd-=1 seq.append(rd+i) else: seq.append(rd) return seq def density(creation_sequence): """ Return the density of the graph with this creation_sequence. The density is the fraction of possible edges present. """ N=len(creation_sequence) two_size=sum(degree_sequence(creation_sequence)) two_possible=N*(N-1) den=two_size/float(two_possible) return den def degree_correlation(creation_sequence): """ Return the degree-degree correlation over all edges. """ cs=creation_sequence s1=0 # deg_i*deg_j s2=0 # deg_i^2+deg_j^2 s3=0 # deg_i+deg_j m=0 # number of edges rd=cs.count("d") # number of d nodes to the right rdi=[ i for i,sym in enumerate(cs) if sym=="d"] # index of "d"s ds=degree_sequence(cs) for i,sym in enumerate(cs): if sym=="d": if i!=rdi[0]: print("Logic error in degree_correlation",i,rdi) raise ValueError rdi.pop(0) degi=ds[i] for dj in rdi: degj=ds[dj] s1+=degj*degi s2+=degi**2+degj**2 s3+=degi+degj m+=1 denom=(2*m*s2-s3*s3) numer=(4*m*s1-s3*s3) if denom==0: if numer==0: return 1 raise ValueError("Zero Denominator but Numerator is %s"%numer) return numer/float(denom) def shortest_path(creation_sequence,u,v): """ Find the shortest path between u and v in a threshold graph G with the given creation_sequence. For an unlabeled creation_sequence, the vertices u and v must be integers in (0,len(sequence)) refering to the position of the desired vertices in the sequence. For a labeled creation_sequence, u and v are labels of veritices. Use cs=creation_sequence(degree_sequence,with_labels=True) to convert a degree sequence to a creation sequence. Returns a list of vertices from u to v. 
Example: if they are neighbors, it returns [u,v] """ # Turn input sequence into a labeled creation sequence first=creation_sequence[0] if isinstance(first,str): # creation sequence cs = [(i,creation_sequence[i]) for i in range(len(creation_sequence))] elif isinstance(first,tuple): # labeled creation sequence cs = creation_sequence[:] elif isinstance(first,int): # compact creation sequence ci = uncompact(creation_sequence) cs = [(i,ci[i]) for i in range(len(ci))] else: raise TypeError("Not a valid creation sequence type") verts=[ s[0] for s in cs ] if v not in verts: raise ValueError("Vertex %s not in graph from creation_sequence"%v) if u not in verts: raise ValueError("Vertex %s not in graph from creation_sequence"%u) # Done checking if u==v: return [u] uindex=verts.index(u) vindex=verts.index(v) bigind=max(uindex,vindex) if cs[bigind][1]=='d': return [u,v] # must be that cs[bigind][1]=='i' cs=cs[bigind:] while cs: vert=cs.pop() if vert[1]=='d': return [u,vert[0],v] # All after u are type 'i' so no connection return -1 def shortest_path_length(creation_sequence,i): """ Return the shortest path length from indicated node to every other node for the threshold graph with the given creation sequence. Node is indicated by index i in creation_sequence unless creation_sequence is labeled, in which case i is taken to be the label of the node. Path lengths in threshold graphs are at most 2. Length to unreachable nodes is set to -1. """ # Turn input sequence into a labeled creation sequence first=creation_sequence[0] if isinstance(first,str): # creation sequence if isinstance(creation_sequence,list): cs = creation_sequence[:] else: cs = list(creation_sequence) elif isinstance(first,tuple): # labeled creation sequence cs = [ v[1] for v in creation_sequence] i = [v[0] for v in creation_sequence].index(i) elif isinstance(first,int): # compact creation sequence cs = uncompact(creation_sequence) else: raise TypeError("Not a valid creation sequence type") # Compute N=len(cs) spl=[2]*N # length 2 to every node spl[i]=0 # except self which is 0 # 1 for all d's to the right for j in range(i+1,N): if cs[j]=="d": spl[j]=1 if cs[i]=='d': # 1 for all nodes to the left for j in range(i): spl[j]=1 # and -1 for any trailing i to indicate unreachable for j in range(N-1,0,-1): if cs[j]=="d": break spl[j]=-1 return spl def betweenness_sequence(creation_sequence,normalized=True): """ Return betweenness for the threshold graph with the given creation sequence. With the default normalized=True the values are scaled by 1/((n-1)*(n-2)); pass normalized=False to get the raw, unscaled values.
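For example (a minimal sketch of the calling convention; one value is
returned per position in the creation sequence):

>>> len(betweenness_sequence('ddiid'))
5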
""" cs=creation_sequence seq=[] # betweenness lastchar='d' # first node is always a 'd' dr=float(cs.count("d")) # number of d's to the right of curren pos irun=0 # number of i's in the last run drun=0 # number of d's in the last run dlast=0.0 # betweenness of last d for i,c in enumerate(cs): if c=='d': #cs[i]=="d": # betweennees = amt shared with eariler d's and i's # + new isolated nodes covered # + new paths to all previous nodes b=dlast + (irun-1)*irun/dr + 2*irun*(i-drun-irun)/dr drun+=1 # update counter else: # cs[i]="i": if lastchar=='d': # if this is a new run of i's dlast=b # accumulate betweenness dr-=drun # update number of d's to the right drun=0 # reset d counter irun=0 # reset i counter b=0 # isolated nodes have zero betweenness irun+=1 # add another i to the run seq.append(float(b)) lastchar=c # normalize by the number of possible shortest paths if normalized: order=len(cs) scale=1.0/((order-1)*(order-2)) seq=[ s*scale for s in seq ] return seq def eigenvectors(creation_sequence): """ Return a 2-tuple of Laplacian eigenvalues and eigenvectors for the threshold network with creation_sequence. The first value is a list of eigenvalues. The second value is a list of eigenvectors. The lists are in the same order so corresponding eigenvectors and eigenvalues are in the same position in the two lists. Notice that the order of the eigenvalues returned by eigenvalues(cs) may not correspond to the order of these eigenvectors. """ ccs=make_compact(creation_sequence) N=sum(ccs) vec=[0]*N val=vec[:] # get number of type d nodes to the right (all for first node) dr=sum(ccs[::2]) nn=ccs[0] vec[0]=[1./sqrt(N)]*N val[0]=0 e=dr dr-=nn type_d=True i=1 dd=1 while dd=0): raise ValueError("p must be in [0,1]") cs=['d'] # threshold sequences always start with a d for i in range(1,n): if random.random() < p: cs.append('d') else: cs.append('i') return cs # maybe *_d_threshold_sequence routines should # be (or be called from) a single routine with a more descriptive name # and a keyword parameter? def right_d_threshold_sequence(n,m): """ Create a skewed threshold graph with a given number of vertices (n) and a given number of edges (m). The routine returns an unlabeled creation sequence for the threshold graph. FIXME: describe algorithm """ cs=['d']+['i']*(n-1) # create sequence with n insolated nodes # m n*(n-1)/2: raise ValueError("Too many edges for this many nodes.") # connected case m >n-1 ind=n-1 sum=n-1 while sum n*(n-1)/2: raise ValueError("Too many edges for this many nodes.") # Connected case when M>N-1 cs[n-1]='d' sum=n-1 ind=1 while summ: # be sure not to change the first vertex cs[sum-m]='i' return cs def swap_d(cs,p_split=1.0,p_combine=1.0,seed=None): """ Perform a "swap" operation on a threshold sequence. The swap preserves the number of nodes and edges in the graph for the given sequence. The resulting sequence is still a threshold sequence. Perform one split and one combine operation on the 'd's of a creation sequence for a threshold graph. This operation maintains the number of nodes and edges in the graph, but shifts the edges from node to node maintaining the threshold quality of the graph. 
""" if not seed is None: random.seed(seed) # preprocess the creation sequence dlist= [ i for (i,node_type) in enumerate(cs[1:-1]) if node_type=='d' ] # split if random.random()>sys.stderr,"split at %s to %s and %s"%(choice,split_to,flip_side) # combine if random.random()= len(cs) or cs[target]=='d' or first_choice==second_choice: return cs # OK to combine cs[first_choice]='i' cs[second_choice]='i' cs[target]='d' # print >>sys.stderr,"combine %s and %s to make %s."%(first_choice,second_choice,target) return cs networkx-1.8.1/networkx/generators/intersection.py0000664000175000017500000000744712177456333022423 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Generators for random intersection graphs. """ # Copyright (C) 2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import random import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)']) __all__ = ['uniform_random_intersection_graph', 'k_random_intersection_graph', 'general_random_intersection_graph', ] def uniform_random_intersection_graph(n, m, p, seed=None): """Return a uniform random intersection graph. Parameters ---------- n : int The number of nodes in the first bipartite set (nodes) m : int The number of nodes in the second bipartite set (attributes) p : float Probability of connecting nodes between bipartite sets seed : int, optional Seed for random number generator (default=None). See Also -------- gnp_random_graph References ---------- .. [1] K.B. Singer-Cohen, Random Intersection Graphs, 1995, PhD thesis, Johns Hopkins University .. [2] Fill, J. A., Scheinerman, E. R., and Singer-Cohen, K. B., Random intersection graphs when m = !(n): An equivalence theorem relating the evolution of the g(n, m, p) and g(n, p) models. Random Struct. Algorithms 16, 2 (2000), 156–176. """ G=nx.bipartite_random_graph(n, m, p, seed=seed) return nx.projected_graph(G, range(n)) def k_random_intersection_graph(n,m,k): """Return a intersection graph with randomly chosen attribute sets for each node that are of equal size (k). Parameters ---------- n : int The number of nodes in the first bipartite set (nodes) m : int The number of nodes in the second bipartite set (attributes) k : float Size of attribute set to assign to each node. seed : int, optional Seed for random number generator (default=None). See Also -------- gnp_random_graph, uniform_random_intersection_graph References ---------- .. [1] Godehardt, E., and Jaworski, J. Two models of random intersection graphs and their applications. Electronic Notes in Discrete Mathematics 10 (2001), 129--132. """ G = nx.empty_graph(n + m) mset = range(n,n+m) for v in range(n): targets = random.sample(mset, k) G.add_edges_from(zip([v]*len(targets), targets)) return nx.projected_graph(G, range(n)) def general_random_intersection_graph(n,m,p): """Return a random intersection graph with independent probabilities for connections between node and attribute sets. Parameters ---------- n : int The number of nodes in the first bipartite set (nodes) m : int The number of nodes in the second bipartite set (attributes) p : list of floats of length m Probabilities for connecting nodes to each attribute seed : int, optional Seed for random number generator (default=None). See Also -------- gnp_random_graph, uniform_random_intersection_graph References ---------- .. [1] Nikoletseas, S. E., Raptopoulos, C., and Spirakis, P. G. The existence and efficient construction of large independent sets in general random intersection graphs. In ICALP (2004), J. 
D´ıaz, J. Karhum¨aki, A. Lepist¨o, and D. Sannella, Eds., vol. 3142 of Lecture Notes in Computer Science, Springer, pp. 1029–1040. """ if len(p)!=m: raise ValueError("Probability list p must have m elements.") G = nx.empty_graph(n + m) mset = range(n,n+m) for u in range(n): for v,q in zip(mset,p): if random.random() # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = """Pieter Swart (swart@lanl.gov)""" __all__ = ['graph_atlas_g'] from networkx.generators.small import make_small_graph def graph_atlas_g(): """ Return the list [G0,G1,...,G1252] of graphs as named in the Graph Atlas. G0,G1,...,G1252 are all graphs with up to 7 nodes. The graphs are listed: 1. in increasing order of number of nodes; 2. for a fixed number of nodes, in increasing order of the number of edges; 3. for fixed numbers of nodes and edges, in increasing order of the degree sequence, for example 111223 < 112222; 4. for fixed degree sequence, in increasing number of automorphisms. Note that indexing is set up so that for GAG=graph_atlas_g(), then G123=GAG[123] and G[0]=empty_graph(0) """ descr_list=[ ['edgelist', 'G0', 0, []], ['edgelist', 'G1', 1, []], ['edgelist', 'G2', 2, []], ['edgelist', 'G3', 2, [[1, 2]]], ['edgelist', 'G4', 3, []], ['edgelist', 'G5', 3, [[2, 3]]], ['edgelist', 'G6', 3, [[1, 2], [1, 3]]], ['edgelist', 'G7', 3, [[1, 2], [1, 3], [2, 3]]], ['edgelist', 'G8', 4, []], ['edgelist', 'G9', 4, [[4, 3]]], ['edgelist', 'G10', 4, [[4, 3], [4, 2]]], ['edgelist', 'G11', 4, [[1, 2], [4, 3]]], ['edgelist', 'G12', 4, [[4, 3], [2, 3], [4, 2]]], ['edgelist', 'G13', 4, [[4, 1], [4, 2], [4, 3]]], ['edgelist', 'G14', 4, [[1, 2], [2, 3], [1, 4]]], ['edgelist', 'G15', 4, [[4, 3], [2, 3], [4, 2], [4, 1]]], ['edgelist', 'G16', 4, [[1, 2], [2, 3], [3, 4], [1, 4]]], ['edgelist', 'G17', 4, [[1, 2], [1, 3], [1, 4], [2, 3], [3, 4]]], ['edgelist', 'G18', 4, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3]]], ['edgelist', 'G19', 5, []], ['edgelist', 'G20', 5, [[5, 4]]], ['edgelist', 'G21', 5, [[2, 3], [1, 2]]], ['edgelist', 'G22', 5, [[1, 3], [5, 4]]], ['edgelist', 'G23', 5, [[2, 3], [1, 2], [3, 1]]], ['edgelist', 'G24', 5, [[5, 4], [4, 3], [4, 2]]], ['edgelist', 'G25', 5, [[4, 3], [5, 4], [1, 5]]], ['edgelist', 'G26', 5, [[2, 3], [1, 2], [5, 4]]], ['edgelist', 'G27', 5, [[5, 4], [2, 3], [4, 2], [4, 3]]], ['edgelist', 'G28', 5, [[1, 4], [2, 1], [3, 2], [4, 3]]], ['edgelist', 'G29', 5, [[5, 4], [5, 1], [5, 2], [5, 3]]], ['edgelist', 'G30', 5, [[5, 1], [4, 2], [5, 4], [4, 3]]], ['edgelist', 'G31', 5, [[3, 4], [2, 3], [1, 2], [5, 1]]], ['edgelist', 'G32', 5, [[2, 3], [1, 2], [3, 1], [5, 4]]], ['edgelist', 'G33', 5, [[1, 4], [3, 1], [4, 3], [2, 1], [3, 2]]], ['edgelist', 'G34', 5, [[5, 3], [5, 4], [3, 4], [5, 2], [5, 1]]], ['edgelist', 'G35', 5, [[1, 2], [2, 3], [3, 4], [1, 5], [1, 3]]], ['edgelist', 'G36', 5, [[5, 1], [2, 3], [5, 4], [4, 3], [4, 2]]], ['edgelist', 'G37', 5, [[2, 1], [5, 2], [3, 5], [4, 3], [2, 4]]], ['edgelist', 'G38', 5, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5]]], ['edgelist', 'G39', 5, [[2, 1], [5, 2], [5, 1], [1, 4], [2, 4], [4, 5]]], ['edgelist', 'G40', 5, [[2, 1], [5, 2], [3, 5], [4, 3], [2, 4], [3, 2]]], ['edgelist', 'G41', 5, [[2, 1], [5, 2], [3, 5], [4, 3], [2, 4], [4, 5]]], ['edgelist', 'G42', 5, [[1, 2], [5, 4], [3, 4], [5, 3], [5, 1], [5, 2]]], ['edgelist', 'G43', 5, [[1, 5], [4, 1], [5, 4], [3, 4], [2, 3], [1, 2]]], ['edgelist', 'G44', 5, [[3, 2], [1, 3], [4, 1], [2, 4], [5, 2], [1, 5]]], ['edgelist', 'G45', 5, [[5, 1], [2, 3], [5, 4], [4, 3], [4, 2], [5, 2], [3, 5]]], ['edgelist', 
'G46', 5, [[5, 2], [3, 5], [4, 3], [2, 4], [4, 5], [1, 4], [5, 1]]], ['edgelist', 'G47', 5, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2]]], ['edgelist', 'G48', 5, [[3, 2], [1, 3], [4, 1], [2, 4], [5, 2], [1, 5], [3, 5]]], ['edgelist', 'G49', 5, [[2, 1], [5, 2], [3, 5], [4, 3], [2, 4], [5, 1], [4, 5], [1, 4]]], ['edgelist', 'G50', 5, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4]]], ['edgelist', 'G51', 5, [[1, 2], [4, 5], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5]]], ['edgelist', 'G52', 5, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5]]], ['edgelist', 'G53', 6, []], ['edgelist', 'G54', 6, [[6, 5]]], ['edgelist', 'G55', 6, [[1, 4], [6, 5]]], ['edgelist', 'G56', 6, [[2, 4], [2, 3]]], ['edgelist', 'G57', 6, [[2, 4], [3, 2], [4, 3]]], ['edgelist', 'G58', 6, [[1, 4], [6, 1], [5, 1]]], ['edgelist', 'G59', 6, [[5, 4], [6, 5], [1, 6]]], ['edgelist', 'G60', 6, [[5, 4], [6, 2], [6, 3]]], ['edgelist', 'G61', 6, [[2, 3], [4, 1], [6, 5]]], ['edgelist', 'G62', 6, [[1, 4], [5, 1], [6, 5], [1, 6]]], ['edgelist', 'G63', 6, [[4, 1], [6, 4], [5, 6], [1, 5]]], ['edgelist', 'G64', 6, [[6, 2], [6, 4], [6, 3], [1, 6]]], ['edgelist', 'G65', 6, [[5, 4], [4, 2], [5, 1], [4, 3]]], ['edgelist', 'G66', 6, [[1, 3], [2, 4], [3, 2], [6, 4]]], ['edgelist', 'G67', 6, [[2, 4], [3, 2], [4, 3], [1, 6]]], ['edgelist', 'G68', 6, [[2, 3], [1, 4], [6, 1], [5, 1]]], ['edgelist', 'G69', 6, [[5, 6], [2, 3], [1, 6], [4, 5]]], ['edgelist', 'G70', 6, [[1, 3], [5, 1], [4, 2], [6, 4]]], ['edgelist', 'G71', 6, [[4, 1], [6, 4], [5, 6], [1, 5], [6, 1]]], ['edgelist', 'G72', 6, [[6, 4], [4, 2], [4, 3], [5, 4], [5, 6]]], ['edgelist', 'G73', 6, [[6, 4], [6, 5], [3, 4], [4, 5], [1, 5]]], ['edgelist', 'G74', 6, [[5, 4], [2, 3], [5, 1], [4, 3], [4, 2]]], ['edgelist', 'G75', 6, [[2, 5], [4, 5], [5, 1], [3, 2], [4, 3]]], ['edgelist', 'G76', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5]]], ['edgelist', 'G77', 6, [[6, 4], [6, 5], [6, 1], [6, 2], [6, 3]]], ['edgelist', 'G78', 6, [[2, 5], [6, 2], [2, 1], [3, 2], [3, 4]]], ['edgelist', 'G79', 6, [[1, 2], [4, 5], [1, 3], [4, 1], [6, 4]]], ['edgelist', 'G80', 6, [[2, 1], [3, 2], [3, 5], [2, 4], [6, 4]]], ['edgelist', 'G81', 6, [[5, 4], [1, 6], [5, 1], [4, 3], [4, 2]]], ['edgelist', 'G82', 6, [[2, 3], [1, 2], [5, 6], [2, 4], [3, 4]]], ['edgelist', 'G83', 6, [[1, 2], [1, 6], [3, 4], [4, 5], [5, 6]]], ['edgelist', 'G84', 6, [[5, 4], [6, 2], [6, 3], [1, 4], [5, 1]]], ['edgelist', 'G85', 6, [[2, 3], [4, 1], [6, 4], [5, 6], [1, 5]]], ['edgelist', 'G86', 6, [[1, 4], [6, 1], [5, 6], [4, 5], [6, 4], [5, 1]]], ['edgelist', 'G87', 6, [[2, 5], [3, 5], [5, 1], [3, 4], [4, 2], [4, 5]]], ['edgelist', 'G88', 6, [[2, 5], [3, 5], [5, 1], [3, 2], [4, 2], [3, 4]]], ['edgelist', 'G89', 6, [[3, 1], [6, 5], [5, 4], [6, 4], [5, 1], [3, 5]]], ['edgelist', 'G90', 6, [[4, 3], [5, 4], [1, 5], [2, 1], [3, 2], [1, 4]]], ['edgelist', 'G91', 6, [[5, 2], [4, 2], [5, 3], [4, 3], [3, 1], [2, 1]]], ['edgelist', 'G92', 6, [[6, 3], [6, 4], [6, 5], [4, 5], [6, 2], [6, 1]]], ['edgelist', 'G93', 6, [[5, 4], [5, 3], [5, 1], [2, 5], [4, 1], [6, 4]]], ['edgelist', 'G94', 6, [[5, 4], [4, 6], [6, 5], [6, 2], [4, 3], [5, 1]]], ['edgelist', 'G95', 6, [[5, 3], [2, 3], [5, 4], [5, 2], [5, 1], [1, 6]]], ['edgelist', 'G96', 6, [[2, 3], [4, 2], [1, 4], [3, 1], [5, 1], [6, 1]]], ['edgelist', 'G97', 6, [[3, 1], [5, 3], [2, 5], [3, 2], [4, 2], [6, 4]]], ['edgelist', 'G98', 6, [[2, 3], [4, 2], [1, 4], [3, 1], [5, 1], [6, 4]]], ['edgelist', 'G99', 6, [[6, 4], [3, 6], [3, 1], [5, 3], [5, 4], [4, 
2]]], ['edgelist', 'G100', 6, [[1, 3], [4, 5], [2, 1], [6, 4], [5, 6], [4, 1]]], ['edgelist', 'G101', 6, [[2, 3], [4, 1], [6, 4], [5, 6], [1, 5], [6, 1]]], ['edgelist', 'G102', 6, [[5, 4], [2, 3], [5, 1], [4, 3], [4, 2], [6, 1]]], ['edgelist', 'G103', 6, [[2, 5], [3, 5], [5, 1], [1, 6], [4, 2], [3, 4]]], ['edgelist', 'G104', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6]]], ['edgelist', 'G105', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6]]], ['edgelist', 'G106', 6, [[2, 4], [3, 2], [4, 3], [1, 5], [6, 1], [5, 6]]], ['edgelist', 'G107', 6, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6]]], ['edgelist', 'G108', 6, [[2, 5], [3, 5], [3, 2], [4, 2], [3, 4], [3, 1], [1, 2]]], ['edgelist', 'G109', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2]]], ['edgelist', 'G110', 6, [[1, 2], [4, 3], [1, 3], [4, 1], [4, 2], [6, 2], [6, 3]]], ['edgelist', 'G111', 6, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [4, 5]]], ['edgelist', 'G112', 6, [[2, 1], [5, 2], [3, 5], [4, 3], [6, 2], [3, 6], [2, 3]]], ['edgelist', 'G113', 6, [[1, 5], [3, 1], [2, 3], [4, 2], [6, 4], [4, 1], [3, 4]]], ['edgelist', 'G114', 6, [[2, 5], [3, 5], [3, 4], [3, 2], [4, 2], [5, 6], [1, 5]]], ['edgelist', 'G115', 6, [[2, 1], [5, 2], [3, 5], [4, 3], [6, 2], [3, 6], [5, 6]]], ['edgelist', 'G116', 6, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5]]], ['edgelist', 'G117', 6, [[1, 6], [5, 1], [6, 5], [1, 3], [4, 1], [4, 3], [1, 2]]], ['edgelist', 'G118', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [5, 2]]], ['edgelist', 'G119', 6, [[1, 2], [5, 1], [2, 5], [1, 3], [4, 1], [4, 3], [4, 6]]], ['edgelist', 'G120', 6, [[2, 5], [3, 5], [5, 1], [1, 6], [4, 2], [3, 4], [4, 5]]], ['edgelist', 'G121', 6, [[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2]]], ['edgelist', 'G122', 6, [[2, 6], [1, 2], [5, 1], [4, 5], [3, 4], [2, 3], [1, 4]]], ['edgelist', 'G123', 6, [[2, 5], [3, 5], [5, 1], [1, 6], [4, 2], [3, 4], [3, 2]]], ['edgelist', 'G124', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [6, 2]]], ['edgelist', 'G125', 6, [[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4]]], ['edgelist', 'G126', 6, [[6, 1], [4, 6], [3, 4], [1, 3], [2, 4], [5, 2], [4, 5]]], ['edgelist', 'G127', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [3, 4]]], ['edgelist', 'G128', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4]]], ['edgelist', 'G129', 6, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4]]], ['edgelist', 'G130', 6, [[2, 3], [1, 2], [3, 1], [4, 1], [5, 4], [6, 5], [4, 6]]], ['edgelist', 'G131', 6, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2]]], ['edgelist', 'G132', 6, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4]]], ['edgelist', 'G133', 6, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5]]], ['edgelist', 'G134', 6, [[2, 3], [4, 2], [1, 4], [2, 1], [3, 1], [4, 3], [6, 4], [5, 1]]], ['edgelist', 'G135', 6, [[1, 2], [3, 5], [1, 3], [6, 3], [4, 2], [4, 3], [3, 2], [5, 2]]], ['edgelist', 'G136', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [2, 6]]], ['edgelist', 'G137', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5]]], ['edgelist', 'G138', 6, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2]]], ['edgelist', 'G139', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1]]], ['edgelist', 'G140', 6, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [4, 1], [6, 2]]], ['edgelist', 'G141', 6, [[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4]]], ['edgelist', 'G142', 6, [[1, 
2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6], [6, 5]]], ['edgelist', 'G143', 6, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [6, 2], [6, 4]]], ['edgelist', 'G144', 6, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5]]], ['edgelist', 'G145', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], [1, 3]]], ['edgelist', 'G146', 6, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4]]], ['edgelist', 'G147', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3]]], ['edgelist', 'G148', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [2, 5], [1, 2]]], ['edgelist', 'G149', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1]]], ['edgelist', 'G150', 6, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2]]], ['edgelist', 'G151', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6]]], ['edgelist', 'G152', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 2]]], ['edgelist', 'G153', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1]]], ['edgelist', 'G154', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 2], [6, 3]]], ['edgelist', 'G155', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4]]], ['edgelist', 'G156', 6, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 3]]], ['edgelist', 'G157', 6, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5]]], ['edgelist', 'G158', 6, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [5, 6]]], ['edgelist', 'G159', 6, [[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [5, 4]]], ['edgelist', 'G160', 6, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 6]]], ['edgelist', 'G161', 6, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6]]], ['edgelist', 'G162', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1]]], ['edgelist', 'G163', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [1, 5], [2, 1], [5, 2]]], ['edgelist', 'G164', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2]]], ['edgelist', 'G165', 6, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5], [5, 1], [6, 1]]], ['edgelist', 'G166', 6, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6]]], ['edgelist', 'G167', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1]]], ['edgelist', 'G168', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5]]], ['edgelist', 'G169', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2]]], ['edgelist', 'G170', 6, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [3, 1]]], ['edgelist', 'G171', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4]]], ['edgelist', 'G172', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2]]], ['edgelist', 'G173', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3]]], ['edgelist', 'G174', 6, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3]]], ['edgelist', 'G175', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [6, 3], [5, 2]]], ['edgelist', 'G176', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [1, 3]]], ['edgelist', 'G177', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [5, 6]]], ['edgelist', 'G178', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [1, 6]]], 
['edgelist', 'G179', 6, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6], [2, 1]]], ['edgelist', 'G180', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 6], [2, 6]]], ['edgelist', 'G181', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1], [3, 5]]], ['edgelist', 'G182', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 3]]], ['edgelist', 'G183', 6, [[2, 1], [5, 2], [1, 5], [6, 1], [5, 6], [4, 5], [2, 4], [6, 2], [3, 4], [2, 3]]], ['edgelist', 'G184', 6, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [6, 3]]], ['edgelist', 'G185', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [5, 2]]], ['edgelist', 'G186', 6, [[1, 2], [3, 5], [1, 3], [5, 6], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4]]], ['edgelist', 'G187', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5]]], ['edgelist', 'G188', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [1, 3], [2, 4], [6, 2]]], ['edgelist', 'G189', 6, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [4, 3], [1, 4]]], ['edgelist', 'G190', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [6, 4], [3, 6], [2, 1]]], ['edgelist', 'G191', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [1, 3], [2, 6]]], ['edgelist', 'G192', 6, [[1, 2], [3, 5], [1, 3], [3, 2], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4]]], ['edgelist', 'G193', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1], [5, 6]]], ['edgelist', 'G194', 6, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [1, 3]]], ['edgelist', 'G195', 6, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [6, 4], [3, 6], [2, 1], [6, 2]]], ['edgelist', 'G196', 6, [[2, 4], [5, 2], [4, 5], [3, 4], [1, 3], [5, 1], [6, 5], [3, 6], [5, 3], [1, 6], [2, 6]]], ['edgelist', 'G197', 6, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2]]], ['edgelist', 'G198', 6, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3]]], ['edgelist', 'G199', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [1, 4]]], ['edgelist', 'G200', 6, [[1, 2], [2, 3], [1, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [5, 4]]], ['edgelist', 'G201', 6, [[4, 3], [2, 4], [3, 2], [1, 3], [6, 1], [3, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2], [1, 5]]], ['edgelist', 'G202', 6, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3], [5, 6]]], ['edgelist', 'G203', 6, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2], [3, 4]]], ['edgelist', 'G204', 6, [[1, 2], [2, 3], [1, 3], [4, 3], [4, 2], [5, 1], [3, 5], [6, 2], [1, 6], [5, 6], [4, 5], [6, 4]]], ['edgelist', 'G205', 6, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2], [3, 4], [1, 5]]], ['edgelist', 'G206', 6, [[1, 2], [2, 3], [1, 3], [4, 3], [4, 2], [5, 1], [3, 5], [6, 2], [1, 6], [5, 6], [4, 5], [6, 4], [4, 1]]], ['edgelist', 'G207', 6, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [2, 4], [3, 1], [5, 1], [6, 4]]], ['edgelist', 'G208', 6, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [2, 3], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [5, 6]]], ['edgelist', 'G209', 7, []], ['edgelist', 'G210', 7, [[7, 6]]], 
['edgelist', 'G211', 7, [[3, 4], [2, 3]]], ['edgelist', 'G212', 7, [[6, 5], [7, 1]]], ['edgelist', 'G213', 7, [[1, 5], [5, 3], [3, 1]]], ['edgelist', 'G214', 7, [[1, 2], [1, 7], [1, 6]]], ['edgelist', 'G215', 7, [[6, 5], [7, 1], [6, 7]]], ['edgelist', 'G216', 7, [[4, 3], [2, 3], [6, 7]]], ['edgelist', 'G217', 7, [[4, 2], [6, 7], [1, 5]]], ['edgelist', 'G218', 7, [[3, 6], [7, 3], [6, 7], [2, 3]]], ['edgelist', 'G219', 7, [[2, 3], [5, 2], [6, 5], [3, 6]]], ['edgelist', 'G220', 7, [[2, 1], [6, 2], [2, 3], [5, 2]]], ['edgelist', 'G221', 7, [[2, 1], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G222', 7, [[4, 5], [3, 4], [2, 3], [1, 2]]], ['edgelist', 'G223', 7, [[5, 3], [1, 5], [3, 1], [6, 7]]], ['edgelist', 'G224', 7, [[1, 2], [7, 1], [1, 6], [5, 3]]], ['edgelist', 'G225', 7, [[4, 2], [6, 5], [7, 6], [1, 7]]], ['edgelist', 'G226', 7, [[1, 5], [4, 1], [3, 6], [7, 3]]], ['edgelist', 'G227', 7, [[3, 4], [2, 3], [7, 1], [6, 5]]], ['edgelist', 'G228', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1]]], ['edgelist', 'G229', 7, [[3, 6], [7, 3], [6, 7], [5, 3], [4, 3]]], ['edgelist', 'G230', 7, [[5, 3], [5, 1], [3, 1], [6, 5], [7, 1]]], ['edgelist', 'G231', 7, [[3, 6], [7, 3], [6, 7], [2, 3], [1, 2]]], ['edgelist', 'G232', 7, [[5, 2], [1, 5], [4, 1], [2, 4], [3, 2]]], ['edgelist', 'G233', 7, [[2, 3], [1, 2], [5, 1], [4, 5], [3, 4]]], ['edgelist', 'G234', 7, [[6, 2], [6, 1], [3, 6], [4, 6], [5, 6]]], ['edgelist', 'G235', 7, [[2, 6], [7, 2], [2, 1], [3, 2], [4, 3]]], ['edgelist', 'G236', 7, [[2, 6], [5, 2], [3, 4], [7, 3], [3, 2]]], ['edgelist', 'G237', 7, [[2, 6], [7, 2], [2, 3], [3, 4], [5, 4]]], ['edgelist', 'G238', 7, [[3, 2], [4, 3], [5, 4], [6, 5], [4, 7]]], ['edgelist', 'G239', 7, [[7, 6], [3, 7], [2, 3], [6, 3], [4, 5]]], ['edgelist', 'G240', 7, [[5, 4], [6, 5], [7, 6], [1, 7], [2, 1]]], ['edgelist', 'G241', 7, [[1, 5], [4, 1], [3, 6], [7, 3], [6, 7]]], ['edgelist', 'G242', 7, [[5, 2], [6, 3], [7, 6], [4, 7], [3, 4]]], ['edgelist', 'G243', 7, [[2, 5], [4, 2], [2, 1], [3, 2], [7, 6]]], ['edgelist', 'G244', 7, [[1, 5], [4, 1], [2, 1], [3, 2], [7, 6]]], ['edgelist', 'G245', 7, [[1, 5], [4, 1], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G246', 7, [[7, 6], [4, 5], [3, 4], [2, 3], [1, 2]]], ['edgelist', 'G247', 7, [[3, 4], [2, 3], [7, 1], [6, 7], [6, 5]]], ['edgelist', 'G248', 7, [[1, 2], [5, 7], [6, 5], [4, 3], [7, 6]]], ['edgelist', 'G249', 7, [[2, 6], [7, 2], [6, 7], [3, 6], [2, 3], [7, 3]]], ['edgelist', 'G250', 7, [[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2]]], ['edgelist', 'G251', 7, [[1, 5], [4, 1], [2, 4], [3, 2], [2, 5], [4, 5]]], ['edgelist', 'G252', 7, [[6, 3], [5, 6], [3, 5], [4, 3], [7, 4], [3, 7]]], ['edgelist', 'G253', 7, [[2, 3], [5, 2], [6, 5], [3, 6], [1, 2], [5, 1]]], ['edgelist', 'G254', 7, [[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1]]], ['edgelist', 'G255', 7, [[3, 6], [7, 3], [6, 7], [3, 5], [2, 3], [4, 3]]], ['edgelist', 'G256', 7, [[2, 5], [4, 2], [3, 4], [2, 3], [3, 6], [7, 3]]], ['edgelist', 'G257', 7, [[6, 5], [7, 6], [2, 7], [6, 2], [4, 7], [1, 2]]], ['edgelist', 'G258', 7, [[7, 6], [2, 7], [6, 2], [4, 2], [1, 4], [2, 5]]], ['edgelist', 'G259', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [3, 6], [7, 3]]], ['edgelist', 'G260', 7, [[2, 5], [4, 2], [3, 4], [2, 3], [3, 6], [7, 6]]], ['edgelist', 'G261', 7, [[3, 4], [2, 3], [4, 7], [6, 5], [7, 6], [6, 3]]], ['edgelist', 'G262', 7, [[3, 6], [7, 3], [6, 7], [2, 5], [4, 2], [3, 2]]], ['edgelist', 'G263', 7, [[5, 6], [1, 5], [4, 1], [3, 4], [5, 3], [7, 4]]], ['edgelist', 'G264', 7, [[1, 5], [4, 1], [2, 4], [7, 6], [2, 5], [2, 1]]], ['edgelist', 
'G265', 7, [[2, 5], [4, 2], [3, 4], [6, 3], [7, 6], [3, 7]]], ['edgelist', 'G266', 7, [[7, 4], [6, 7], [5, 6], [2, 5], [3, 2], [6, 3]]], ['edgelist', 'G267', 7, [[2, 1], [4, 2], [7, 4], [6, 7], [5, 6], [2, 5]]], ['edgelist', 'G268', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6]]], ['edgelist', 'G269', 7, [[1, 5], [4, 1], [5, 4], [3, 6], [7, 3], [6, 7]]], ['edgelist', 'G270', 7, [[7, 4], [1, 7], [7, 3], [6, 7], [7, 2], [5, 7]]], ['edgelist', 'G271', 7, [[3, 5], [6, 3], [3, 4], [7, 3], [2, 3], [2, 1]]], ['edgelist', 'G272', 7, [[2, 1], [3, 2], [6, 3], [2, 5], [4, 2], [7, 3]]], ['edgelist', 'G273', 7, [[2, 1], [3, 2], [4, 7], [2, 4], [5, 2], [6, 5]]], ['edgelist', 'G274', 7, [[2, 1], [3, 2], [6, 3], [7, 6], [2, 5], [4, 2]]], ['edgelist', 'G275', 7, [[2, 1], [3, 5], [6, 3], [7, 6], [3, 7], [4, 3]]], ['edgelist', 'G276', 7, [[5, 1], [2, 5], [4, 2], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G277', 7, [[7, 6], [2, 3], [1, 2], [3, 1], [4, 3], [1, 5]]], ['edgelist', 'G278', 7, [[1, 5], [4, 1], [2, 1], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G279', 7, [[2, 1], [4, 2], [7, 4], [3, 7], [5, 2], [6, 5]]], ['edgelist', 'G280', 7, [[3, 6], [7, 3], [5, 3], [2, 5], [4, 2], [1, 4]]], ['edgelist', 'G281', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 3], [7, 6]]], ['edgelist', 'G282', 7, [[1, 5], [4, 1], [3, 2], [6, 3], [7, 6], [3, 7]]], ['edgelist', 'G283', 7, [[4, 5], [2, 1], [3, 2], [6, 3], [7, 6], [3, 7]]], ['edgelist', 'G284', 7, [[5, 6], [1, 5], [4, 1], [7, 4], [2, 1], [3, 2]]], ['edgelist', 'G285', 7, [[3, 6], [7, 3], [6, 7], [2, 5], [4, 2], [2, 1]]], ['edgelist', 'G286', 7, [[5, 6], [4, 5], [3, 4], [2, 3], [1, 2], [7, 1]]], ['edgelist', 'G287', 7, [[7, 5], [6, 7], [5, 6], [3, 4], [2, 3], [1, 2]]], ['edgelist', 'G288', 7, [[1, 2], [5, 1], [3, 4], [6, 3], [7, 6], [4, 7]]], ['edgelist', 'G289', 7, [[2, 3], [1, 2], [5, 1], [4, 5], [3, 4], [7, 6]]], ['edgelist', 'G290', 7, [[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [4, 5]]], ['edgelist', 'G291', 7, [[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [6, 3]]], ['edgelist', 'G292', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2]]], ['edgelist', 'G293', 7, [[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [2, 1]]], ['edgelist', 'G294', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [3, 6], [7, 3], [3, 1]]], ['edgelist', 'G295', 7, [[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [3, 7]]], ['edgelist', 'G296', 7, [[2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [4, 5], [7, 4]]], ['edgelist', 'G297', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [3, 6], [7, 3], [4, 5]]], ['edgelist', 'G298', 7, [[1, 5], [4, 1], [2, 4], [4, 7], [2, 5], [2, 1], [6, 5]]], ['edgelist', 'G299', 7, [[1, 5], [4, 1], [2, 4], [7, 6], [2, 5], [2, 1], [4, 5]]], ['edgelist', 'G300', 7, [[6, 3], [5, 6], [3, 5], [4, 3], [7, 4], [3, 7], [3, 2]]], ['edgelist', 'G301', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [3, 6]]], ['edgelist', 'G302', 7, [[6, 3], [5, 6], [3, 5], [4, 3], [7, 4], [3, 7], [4, 2]]], ['edgelist', 'G303', 7, [[2, 5], [4, 2], [3, 4], [5, 3], [3, 1], [3, 2], [7, 1]]], ['edgelist', 'G304', 7, [[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [4, 6]]], ['edgelist', 'G305', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [4, 6]]], ['edgelist', 'G306', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [2, 6]]], ['edgelist', 'G307', 7, [[4, 3], [5, 4], [4, 6], [3, 5], [6, 3], [7, 2], [7, 5]]], ['edgelist', 'G308', 7, [[2, 3], [6, 2], [5, 6], [3, 5], [1, 3], [6, 1], [1, 4]]], ['edgelist', 'G309', 7, [[4, 5], [2, 4], [3, 2], [7, 3], [6, 7], [2, 6], [5, 2]]], ['edgelist', 'G310', 7, [[1, 2], 
[5, 1], [2, 5], [3, 2], [4, 3], [6, 4], [5, 6]]], ['edgelist', 'G311', 7, [[7, 4], [6, 7], [2, 6], [3, 2], [4, 3], [5, 3], [6, 5]]], ['edgelist', 'G312', 7, [[2, 3], [5, 2], [6, 5], [7, 6], [4, 7], [3, 4], [6, 3]]], ['edgelist', 'G313', 7, [[5, 2], [4, 5], [2, 4], [3, 2], [7, 3], [6, 7], [3, 6]]], ['edgelist', 'G314', 7, [[4, 1], [7, 4], [1, 7], [2, 1], [1, 3], [6, 1], [1, 5]]], ['edgelist', 'G315', 7, [[2, 6], [7, 2], [2, 3], [4, 2], [5, 4], [2, 5], [5, 1]]], ['edgelist', 'G316', 7, [[6, 1], [7, 6], [1, 7], [6, 3], [2, 6], [7, 4], [5, 7]]], ['edgelist', 'G317', 7, [[5, 2], [1, 5], [2, 1], [3, 2], [1, 4], [7, 1], [5, 6]]], ['edgelist', 'G318', 7, [[6, 3], [7, 6], [3, 7], [3, 5], [4, 3], [2, 1], [3, 2]]], ['edgelist', 'G319', 7, [[5, 2], [1, 5], [4, 1], [2, 4], [3, 2], [2, 6], [7, 2]]], ['edgelist', 'G320', 7, [[2, 1], [5, 2], [1, 5], [6, 5], [3, 2], [4, 3], [7, 2]]], ['edgelist', 'G321', 7, [[1, 2], [5, 1], [2, 5], [3, 2], [4, 3], [6, 5], [7, 5]]], ['edgelist', 'G322', 7, [[3, 4], [6, 3], [7, 6], [4, 7], [2, 3], [5, 6], [1, 6]]], ['edgelist', 'G323', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [3, 2], [7, 6]]], ['edgelist', 'G324', 7, [[3, 6], [7, 3], [6, 7], [5, 3], [2, 3], [1, 2], [4, 2]]], ['edgelist', 'G325', 7, [[3, 6], [7, 3], [5, 3], [2, 5], [4, 2], [3, 4], [1, 2]]], ['edgelist', 'G326', 7, [[7, 3], [6, 7], [3, 6], [2, 3], [1, 2], [5, 2], [4, 2]]], ['edgelist', 'G327', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [6, 5], [3, 2], [7, 4]]], ['edgelist', 'G328', 7, [[3, 6], [7, 3], [6, 7], [5, 6], [4, 7], [2, 3], [1, 2]]], ['edgelist', 'G329', 7, [[3, 6], [7, 3], [2, 5], [2, 3], [1, 2], [5, 1], [1, 4]]], ['edgelist', 'G330', 7, [[7, 6], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4], [4, 5]]], ['edgelist', 'G331', 7, [[5, 2], [1, 5], [2, 1], [4, 7], [3, 4], [1, 3], [6, 1]]], ['edgelist', 'G332', 7, [[5, 2], [1, 5], [4, 1], [2, 4], [3, 2], [6, 3], [7, 2]]], ['edgelist', 'G333', 7, [[5, 2], [1, 5], [2, 1], [3, 4], [1, 3], [6, 1], [7, 6]]], ['edgelist', 'G334', 7, [[1, 2], [6, 1], [7, 6], [4, 7], [3, 4], [1, 3], [5, 1]]], ['edgelist', 'G335', 7, [[2, 1], [5, 2], [3, 5], [4, 3], [5, 4], [1, 5], [7, 6]]], ['edgelist', 'G336', 7, [[4, 7], [3, 4], [2, 3], [1, 2], [5, 1], [2, 5], [6, 5]]], ['edgelist', 'G337', 7, [[2, 1], [6, 2], [7, 6], [3, 7], [2, 3], [4, 3], [5, 4]]], ['edgelist', 'G338', 7, [[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [7, 6], [5, 2]]], ['edgelist', 'G339', 7, [[6, 3], [7, 6], [3, 7], [2, 3], [5, 2], [1, 5], [4, 2]]], ['edgelist', 'G340', 7, [[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [7, 6], [6, 3]]], ['edgelist', 'G341', 7, [[2, 5], [1, 2], [3, 1], [4, 3], [6, 4], [1, 6], [7, 4]]], ['edgelist', 'G342', 7, [[3, 2], [4, 3], [7, 4], [6, 7], [1, 6], [3, 1], [6, 5]]], ['edgelist', 'G343', 7, [[6, 3], [7, 6], [3, 7], [2, 3], [1, 2], [5, 1], [4, 1]]], ['edgelist', 'G344', 7, [[5, 2], [1, 5], [4, 1], [2, 4], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G345', 7, [[2, 1], [3, 2], [6, 3], [5, 6], [1, 5], [5, 2], [7, 4]]], ['edgelist', 'G346', 7, [[3, 6], [7, 3], [1, 5], [4, 1], [2, 4], [5, 2], [2, 1]]], ['edgelist', 'G347', 7, [[7, 6], [1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3]]], ['edgelist', 'G348', 7, [[3, 2], [6, 3], [5, 6], [1, 5], [4, 1], [7, 4], [3, 7]]], ['edgelist', 'G349', 7, [[5, 1], [4, 5], [2, 4], [3, 2], [6, 3], [7, 6], [3, 7]]], ['edgelist', 'G350', 7, [[7, 6], [3, 7], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4]]], ['edgelist', 'G351', 7, [[5, 2], [1, 5], [3, 1], [4, 3], [7, 4], [6, 7], [1, 6]]], ['edgelist', 'G352', 7, [[1, 5], [4, 1], [5, 4], [3, 2], [6, 3], [7, 6], [3, 7]]], ['edgelist', 
'G353', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7], [1, 7]]], ['edgelist', 'G354', 7, [[2, 1], [5, 2], [1, 5], [6, 3], [7, 6], [4, 7], [3, 4]]], ['edgelist', 'G355', 7, [[1, 2], [5, 1], [6, 5], [3, 6], [2, 3], [6, 2], [5, 2], [3, 5]]], ['edgelist', 'G356', 7, [[5, 2], [6, 5], [3, 6], [2, 3], [1, 2], [6, 1], [1, 5], [3, 1]]], ['edgelist', 'G357', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [4, 5], [6, 2], [7, 2]]], ['edgelist', 'G358', 7, [[5, 2], [6, 5], [3, 6], [2, 3], [6, 2], [7, 6], [3, 5], [4, 3]]], ['edgelist', 'G359', 7, [[2, 4], [1, 2], [5, 1], [3, 5], [2, 3], [5, 2], [6, 5], [2, 6]]], ['edgelist', 'G360', 7, [[3, 1], [4, 3], [7, 4], [6, 7], [1, 6], [4, 1], [1, 7], [5, 1]]], ['edgelist', 'G361', 7, [[2, 1], [3, 2], [6, 3], [5, 6], [1, 5], [3, 1], [6, 1], [7, 6]]], ['edgelist', 'G362', 7, [[2, 1], [3, 2], [4, 3], [2, 4], [5, 4], [3, 5], [6, 3], [4, 6]]], ['edgelist', 'G363', 7, [[3, 1], [4, 3], [7, 4], [6, 7], [1, 6], [4, 1], [7, 1], [5, 6]]], ['edgelist', 'G364', 7, [[2, 1], [3, 2], [5, 4], [2, 6], [5, 2], [3, 5], [6, 3], [4, 6]]], ['edgelist', 'G365', 7, [[4, 6], [3, 2], [5, 4], [2, 6], [5, 2], [3, 5], [6, 3], [5, 7]]], ['edgelist', 'G366', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [4, 5], [3, 2], [6, 3]]], ['edgelist', 'G367', 7, [[4, 6], [3, 2], [5, 4], [2, 6], [5, 2], [3, 5], [6, 3], [1, 4]]], ['edgelist', 'G368', 7, [[5, 1], [3, 5], [1, 3], [4, 1], [3, 4], [6, 3], [7, 6], [3, 7]]], ['edgelist', 'G369', 7, [[4, 3], [7, 4], [6, 7], [3, 6], [1, 3], [6, 1], [5, 6], [3, 5]]], ['edgelist', 'G370', 7, [[1, 6], [5, 1], [3, 5], [6, 3], [2, 6], [5, 2], [4, 5], [6, 4]]], ['edgelist', 'G371', 7, [[3, 4], [2, 3], [5, 2], [6, 5], [2, 6], [6, 3], [7, 6], [4, 7]]], ['edgelist', 'G372', 7, [[6, 3], [5, 6], [1, 5], [4, 1], [7, 4], [3, 7], [5, 3], [4, 3]]], ['edgelist', 'G373', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3], [6, 5], [3, 6]]], ['edgelist', 'G374', 7, [[6, 7], [3, 6], [7, 3], [4, 3], [5, 4], [1, 5], [4, 1], [3, 5]]], ['edgelist', 'G375', 7, [[2, 1], [6, 1], [4, 3], [2, 4], [6, 3], [7, 2], [7, 3], [7, 6]]], ['edgelist', 'G376', 7, [[6, 5], [7, 6], [4, 7], [1, 4], [5, 1], [3, 5], [4, 3], [1, 3]]], ['edgelist', 'G377', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 3], [2, 6]]], ['edgelist', 'G378', 7, [[6, 1], [7, 3], [1, 7], [2, 1], [3, 2], [6, 3], [5, 6], [5, 7]]], ['edgelist', 'G379', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [3, 2], [2, 6], [7, 2]]], ['edgelist', 'G380', 7, [[1, 3], [5, 1], [2, 5], [1, 2], [4, 1], [2, 4], [6, 2], [7, 2]]], ['edgelist', 'G381', 7, [[5, 3], [1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [2, 6], [7, 2]]], ['edgelist', 'G382', 7, [[1, 5], [4, 1], [5, 4], [2, 5], [4, 2], [2, 6], [3, 2], [7, 2]]], ['edgelist', 'G383', 7, [[3, 2], [1, 3], [4, 1], [6, 4], [3, 6], [4, 3], [5, 4], [7, 6]]], ['edgelist', 'G384', 7, [[5, 3], [1, 5], [4, 1], [2, 4], [5, 2], [4, 5], [2, 6], [7, 2]]], ['edgelist', 'G385', 7, [[3, 2], [1, 3], [4, 1], [6, 4], [3, 6], [7, 6], [5, 4], [6, 1]]], ['edgelist', 'G386', 7, [[2, 1], [3, 2], [4, 3], [2, 4], [5, 3], [4, 5], [5, 6], [7, 5]]], ['edgelist', 'G387', 7, [[7, 6], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4], [1, 2], [4, 5]]], ['edgelist', 'G388', 7, [[1, 2], [7, 6], [3, 4], [7, 5], [7, 4], [7, 3], [7, 1], [7, 2]]], ['edgelist', 'G389', 7, [[7, 5], [2, 3], [3, 4], [7, 6], [5, 6], [7, 3], [7, 1], [7, 2]]], ['edgelist', 'G390', 7, [[1, 2], [2, 3], [3, 4], [7, 6], [7, 5], [7, 4], [7, 1], [7, 3]]], ['edgelist', 'G391', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [7, 2], [6, 2], [3, 6]]], ['edgelist', 'G392', 7, [[4, 1], 
[3, 4], [5, 3], [1, 5], [2, 1], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G393', 7, [[3, 2], [4, 3], [7, 4], [6, 7], [1, 6], [3, 1], [6, 3], [5, 6]]], ['edgelist', 'G394', 7, [[2, 1], [3, 2], [4, 3], [5, 4], [6, 3], [2, 6], [7, 2], [3, 7]]], ['edgelist', 'G395', 7, [[3, 6], [5, 3], [2, 5], [4, 2], [1, 4], [2, 1], [3, 2], [7, 3]]], ['edgelist', 'G396', 7, [[5, 6], [1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [7, 4]]], ['edgelist', 'G397', 7, [[1, 2], [5, 1], [2, 5], [3, 2], [5, 3], [6, 5], [2, 6], [7, 4]]], ['edgelist', 'G398', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [2, 7], [6, 1]]], ['edgelist', 'G399', 7, [[5, 6], [1, 5], [2, 1], [5, 2], [4, 1], [2, 4], [7, 2], [3, 7]]], ['edgelist', 'G400', 7, [[3, 6], [5, 3], [1, 5], [2, 1], [5, 2], [4, 1], [2, 4], [7, 2]]], ['edgelist', 'G401', 7, [[2, 7], [3, 2], [1, 3], [2, 1], [5, 2], [4, 5], [3, 4], [5, 6]]], ['edgelist', 'G402', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [2, 7], [6, 4]]], ['edgelist', 'G403', 7, [[1, 5], [4, 1], [5, 4], [2, 5], [4, 2], [6, 2], [7, 3], [2, 7]]], ['edgelist', 'G404', 7, [[3, 4], [2, 3], [1, 2], [6, 1], [5, 6], [1, 5], [3, 1], [7, 6]]], ['edgelist', 'G405', 7, [[5, 6], [1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3], [7, 3]]], ['edgelist', 'G406', 7, [[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [5, 2], [3, 7], [6, 3]]], ['edgelist', 'G407', 7, [[1, 2], [2, 3], [3, 4], [7, 4], [5, 6], [7, 3], [7, 1], [7, 2]]], ['edgelist', 'G408', 7, [[5, 2], [1, 5], [4, 1], [2, 4], [1, 2], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G409', 7, [[1, 2], [2, 3], [3, 4], [7, 6], [5, 6], [7, 3], [7, 5], [7, 2]]], ['edgelist', 'G410', 7, [[1, 2], [5, 1], [1, 3], [6, 1], [7, 6], [4, 7], [3, 4], [6, 3]]], ['edgelist', 'G411', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [3, 6], [7, 3]]], ['edgelist', 'G412', 7, [[5, 6], [4, 5], [2, 4], [3, 2], [7, 3], [5, 7], [4, 3], [1, 2]]], ['edgelist', 'G413', 7, [[2, 1], [3, 7], [4, 3], [5, 4], [6, 3], [2, 6], [7, 2], [7, 6]]], ['edgelist', 'G414', 7, [[3, 4], [2, 3], [1, 2], [5, 1], [6, 5], [7, 6], [6, 3], [5, 2]]], ['edgelist', 'G415', 7, [[5, 2], [1, 5], [4, 1], [2, 4], [4, 5], [3, 2], [3, 6], [7, 3]]], ['edgelist', 'G416', 7, [[1, 7], [5, 1], [2, 5], [4, 2], [1, 4], [3, 5], [4, 3], [6, 3]]], ['edgelist', 'G417', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [4, 3], [2, 1], [7, 6]]], ['edgelist', 'G418', 7, [[1, 2], [5, 1], [4, 3], [7, 4], [6, 7], [3, 6], [7, 3], [4, 6]]], ['edgelist', 'G419', 7, [[6, 3], [7, 6], [3, 7], [5, 3], [1, 5], [4, 1], [3, 4], [2, 3]]], ['edgelist', 'G420', 7, [[3, 1], [2, 3], [1, 2], [6, 1], [5, 6], [1, 5], [7, 1], [4, 7]]], ['edgelist', 'G421', 7, [[1, 2], [3, 1], [4, 3], [3, 2], [2, 5], [6, 5], [6, 4], [2, 7]]], ['edgelist', 'G422', 7, [[2, 7], [3, 2], [1, 3], [2, 1], [5, 2], [4, 5], [3, 4], [6, 7]]], ['edgelist', 'G423', 7, [[7, 2], [1, 7], [2, 1], [6, 2], [1, 6], [3, 2], [4, 3], [5, 4]]], ['edgelist', 'G424', 7, [[7, 6], [3, 7], [2, 3], [5, 2], [4, 5], [1, 4], [5, 1], [3, 5]]], ['edgelist', 'G425', 7, [[2, 7], [1, 2], [6, 1], [2, 6], [4, 1], [5, 4], [3, 5], [1, 3]]], ['edgelist', 'G426', 7, [[3, 7], [5, 3], [1, 5], [2, 1], [5, 2], [4, 5], [6, 4], [3, 6]]], ['edgelist', 'G427', 7, [[2, 1], [3, 2], [7, 3], [6, 7], [2, 6], [5, 2], [4, 5], [3, 4]]], ['edgelist', 'G428', 7, [[7, 2], [5, 4], [2, 1], [6, 2], [4, 3], [3, 2], [5, 7], [6, 5]]], ['edgelist', 'G429', 7, [[5, 3], [1, 5], [2, 1], [5, 2], [4, 5], [7, 4], [6, 7], [4, 6]]], ['edgelist', 'G430', 7, [[5, 2], [3, 5], [1, 3], [7, 1], [4, 7], [1, 4], [6, 1], [5, 6]]], ['edgelist', 'G431', 7, [[6, 7], [5, 
6], [1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2]]], ['edgelist', 'G432', 7, [[7, 4], [6, 7], [5, 6], [1, 5], [2, 1], [3, 2], [6, 3], [5, 2]]], ['edgelist', 'G433', 7, [[1, 2], [3, 1], [4, 3], [3, 2], [2, 5], [6, 5], [6, 4], [5, 7]]], ['edgelist', 'G434', 7, [[5, 1], [4, 5], [3, 4], [7, 3], [6, 7], [2, 6], [5, 2], [3, 2]]], ['edgelist', 'G435', 7, [[7, 2], [1, 7], [5, 4], [6, 2], [1, 6], [3, 2], [4, 3], [6, 7]]], ['edgelist', 'G436', 7, [[7, 3], [6, 7], [4, 6], [7, 4], [5, 4], [1, 5], [2, 1], [5, 2]]], ['edgelist', 'G437', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 7], [6, 2]]], ['edgelist', 'G438', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 7], [5, 3]]], ['edgelist', 'G439', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 7], [1, 6]]], ['edgelist', 'G440', 7, [[5, 1], [3, 5], [4, 3], [7, 4], [6, 7], [5, 6], [2, 3], [6, 2]]], ['edgelist', 'G441', 7, [[6, 2], [3, 5], [4, 3], [1, 4], [6, 1], [5, 6], [2, 3], [1, 7]]], ['edgelist', 'G442', 7, [[6, 7], [3, 6], [5, 3], [1, 5], [4, 1], [3, 4], [2, 5], [4, 2]]], ['edgelist', 'G443', 7, [[1, 5], [2, 1], [5, 2], [4, 5], [6, 4], [7, 6], [3, 7], [5, 3]]], ['edgelist', 'G444', 7, [[1, 2], [7, 6], [3, 4], [4, 5], [7, 5], [1, 6], [7, 3], [7, 2]]], ['edgelist', 'G445', 7, [[2, 3], [1, 2], [5, 1], [6, 5], [3, 6], [4, 3], [7, 4], [6, 7]]], ['edgelist', 'G446', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 7], [2, 7]]], ['edgelist', 'G447', 7, [[7, 3], [6, 7], [3, 6], [2, 3], [5, 2], [1, 5], [4, 1], [2, 4]]], ['edgelist', 'G448', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 6], [7, 2]]], ['edgelist', 'G449', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 4]]], ['edgelist', 'G450', 7, [[1, 5], [2, 1], [4, 3], [2, 5], [3, 6], [6, 4], [7, 5], [7, 4]]], ['edgelist', 'G451', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [7, 3], [6, 7], [3, 6]]], ['edgelist', 'G452', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4]]], ['edgelist', 'G453', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 3]]], ['edgelist', 'G454', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5]]], ['edgelist', 'G455', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [5, 6]]], ['edgelist', 'G456', 7, [[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [5, 4]]], ['edgelist', 'G457', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 6]]], ['edgelist', 'G458', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6]]], ['edgelist', 'G459', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1]]], ['edgelist', 'G460', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [1, 5], [2, 1], [5, 2]]], ['edgelist', 'G461', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2]]], ['edgelist', 'G462', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5], [5, 1], [6, 1]]], ['edgelist', 'G463', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6]]], ['edgelist', 'G464', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1]]], ['edgelist', 'G465', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5]]], ['edgelist', 'G466', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2]]], ['edgelist', 'G467', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [3, 1]]], ['edgelist', 'G468', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 
4]]], ['edgelist', 'G469', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2]]], ['edgelist', 'G470', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3]]], ['edgelist', 'G471', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3]]], ['edgelist', 'G472', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [6, 3], [5, 2]]], ['edgelist', 'G473', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5], [1, 7]]], ['edgelist', 'G474', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5], [3, 7]]], ['edgelist', 'G475', 7, [[2, 3], [4, 2], [1, 4], [2, 1], [3, 1], [4, 3], [6, 4], [5, 1], [2, 7]]], ['edgelist', 'G476', 7, [[1, 2], [3, 5], [1, 3], [4, 2], [4, 3], [3, 2], [5, 2], [6, 3], [3, 7]]], ['edgelist', 'G477', 7, [[1, 2], [3, 5], [1, 3], [6, 3], [4, 2], [4, 3], [3, 2], [5, 2], [2, 7]]], ['edgelist', 'G478', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [2, 6], [2, 7]]], ['edgelist', 'G479', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [2, 6], [5, 7]]], ['edgelist', 'G480', 7, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2], [2, 7]]], ['edgelist', 'G481', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [5, 7]]], ['edgelist', 'G482', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 7]]], ['edgelist', 'G483', 7, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 7]]], ['edgelist', 'G484', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [2, 7]]], ['edgelist', 'G485', 7, [[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4], [3, 7]]], ['edgelist', 'G486', 7, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [4, 1], [6, 2], [1, 7]]], ['edgelist', 'G487', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 1], [1, 5], [6, 7]]], ['edgelist', 'G488', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [5, 7]]], ['edgelist', 'G489', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [3, 7]]], ['edgelist', 'G490', 7, [[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4], [6, 7]]], ['edgelist', 'G491', 7, [[2, 3], [4, 2], [1, 4], [2, 1], [3, 1], [4, 3], [5, 1], [7, 6], [7, 4]]], ['edgelist', 'G492', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [1, 7]]], ['edgelist', 'G493', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 7], [6, 5], [1, 4], [3, 5]]], ['edgelist', 'G494', 7, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [3, 2], [6, 2], [6, 7]]], ['edgelist', 'G495', 7, [[3, 1], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [6, 4], [5, 7]]], ['edgelist', 'G496', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 7]]], ['edgelist', 'G497', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [3, 7]]], ['edgelist', 'G498', 7, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [4, 1], [6, 2], [6, 7]]], ['edgelist', 'G499', 7, [[1, 2], [3, 6], [1, 3], [6, 5], [4, 2], [4, 3], [4, 1], [6, 2], [3, 7]]], ['edgelist', 'G500', 7, [[1, 2], [3, 6], [1, 3], [5, 1], [4, 2], [4, 3], [6, 2], [6, 4], [1, 7]]], ['edgelist', 'G501', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6], [6, 5], [6, 7]]], ['edgelist', 'G502', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 7]]], ['edgelist', 'G503', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [5, 7]]], ['edgelist', 'G504', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], 
[1, 3], [1, 7]]], ['edgelist', 'G505', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [4, 7]]], ['edgelist', 'G506', 7, [[1, 2], [3, 5], [1, 3], [6, 3], [4, 2], [4, 3], [3, 2], [5, 2], [6, 7]]], ['edgelist', 'G507', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 7]]], ['edgelist', 'G508', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [3, 7]]], ['edgelist', 'G509', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [7, 6], [7, 2]]], ['edgelist', 'G510', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [3, 7]]], ['edgelist', 'G511', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [4, 5], [1, 7]]], ['edgelist', 'G512', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [2, 5]]], ['edgelist', 'G513', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [1, 7]]], ['edgelist', 'G514', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [5, 7]]], ['edgelist', 'G515', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [6, 7]]], ['edgelist', 'G516', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [5, 7]]], ['edgelist', 'G517', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [6, 7]]], ['edgelist', 'G518', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [1, 5]]], ['edgelist', 'G519', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], [1, 3], [2, 7]]], ['edgelist', 'G520', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 3], [1, 3], [5, 7]]], ['edgelist', 'G521', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [3, 7]]], ['edgelist', 'G522', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [4, 7]]], ['edgelist', 'G523', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [3, 7]]], ['edgelist', 'G524', 7, [[1, 2], [3, 6], [1, 3], [6, 2], [4, 2], [4, 3], [3, 2], [7, 1], [7, 5]]], ['edgelist', 'G525', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [2, 7]]], ['edgelist', 'G526', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 1], [6, 7]]], ['edgelist', 'G527', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 7]]], ['edgelist', 'G528', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [1, 7]]], ['edgelist', 'G529', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [4, 7]]], ['edgelist', 'G530', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [3, 5]]], ['edgelist', 'G531', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [4, 7]]], ['edgelist', 'G532', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [2, 7]]], ['edgelist', 'G533', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 2], [5, 7]]], ['edgelist', 'G534', 7, [[1, 2], [3, 6], [1, 3], [6, 2], [4, 2], [4, 3], [4, 1], [7, 5], [7, 1]]], ['edgelist', 'G535', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1], [2, 7]]], ['edgelist', 'G536', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [7, 1]]], ['edgelist', 'G537', 7, [[6, 4], [4, 3], [5, 4], [6, 5], [3, 6], [2, 3], [5, 2], [7, 1], [7, 3]]], ['edgelist', 'G538', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1], [1, 7]]], ['edgelist', 'G539', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [6, 7]]], ['edgelist', 'G540', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 
1], [6, 3], [6, 1], [5, 7]]], ['edgelist', 'G541', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [4, 7], [2, 7], [1, 2], [6, 5]]], ['edgelist', 'G542', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 2], [6, 3], [6, 7]]], ['edgelist', 'G543', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [2, 7]]], ['edgelist', 'G544', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [3, 2], [4, 7]]], ['edgelist', 'G545', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [1, 6], [6, 5], [5, 7]]], ['edgelist', 'G546', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 6], [6, 4], [2, 6], [1, 7]]], ['edgelist', 'G547', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 6], [6, 3], [6, 1], [5, 7]]], ['edgelist', 'G548', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 2], [1, 7]]], ['edgelist', 'G549', 7, [[1, 2], [3, 6], [1, 3], [6, 4], [4, 2], [4, 3], [6, 2], [7, 5], [7, 1]]], ['edgelist', 'G550', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 2], [6, 3], [1, 7]]], ['edgelist', 'G551', 7, [[7, 4], [2, 3], [7, 6], [4, 5], [7, 5], [1, 6], [7, 1], [7, 2], [7, 3]]], ['edgelist', 'G552', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [7, 3], [6, 7], [3, 6]]], ['edgelist', 'G553', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [4, 5], [7, 6], [7, 1]]], ['edgelist', 'G554', 7, [[2, 5], [3, 5], [3, 4], [1, 5], [4, 2], [5, 6], [1, 6], [7, 5], [7, 4]]], ['edgelist', 'G555', 7, [[5, 2], [6, 5], [7, 6], [4, 7], [3, 4], [2, 3], [6, 3], [1, 6], [3, 1]]], ['edgelist', 'G556', 7, [[5, 2], [4, 2], [3, 4], [5, 1], [6, 1], [6, 3], [6, 5], [7, 5], [6, 7]]], ['edgelist', 'G557', 7, [[2, 1], [3, 2], [7, 3], [4, 7], [6, 4], [5, 6], [4, 5], [3, 4], [1, 3]]], ['edgelist', 'G558', 7, [[1, 3], [6, 1], [2, 6], [3, 2], [5, 3], [6, 5], [7, 6], [4, 7], [3, 4]]], ['edgelist', 'G559', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7], [1, 7], [2, 4], [5, 2]]], ['edgelist', 'G560', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 5], [6, 2], [7, 2], [1, 7]]], ['edgelist', 'G561', 7, [[1, 5], [2, 1], [5, 2], [4, 5], [3, 4], [7, 3], [6, 7], [2, 6], [3, 2]]], ['edgelist', 'G562', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 4], [7, 6], [4, 7]]], ['edgelist', 'G563', 7, [[7, 6], [4, 7], [3, 4], [1, 5], [1, 6], [2, 1], [3, 1], [2, 3], [6, 5]]], ['edgelist', 'G564', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 4], [6, 2], [7, 2], [1, 7]]], ['edgelist', 'G565', 7, [[6, 3], [7, 6], [4, 7], [3, 4], [1, 3], [5, 1], [6, 5], [2, 6], [1, 2]]], ['edgelist', 'G566', 7, [[3, 5], [2, 3], [5, 2], [6, 5], [1, 6], [2, 1], [7, 5], [4, 7], [3, 4]]], ['edgelist', 'G567', 7, [[7, 3], [6, 7], [3, 6], [2, 3], [1, 2], [5, 1], [2, 5], [4, 2], [1, 4]]], ['edgelist', 'G568', 7, [[1, 6], [7, 1], [2, 7], [5, 2], [3, 5], [4, 3], [2, 4], [6, 2], [7, 6]]], ['edgelist', 'G569', 7, [[7, 6], [4, 7], [3, 4], [6, 3], [1, 6], [2, 1], [5, 2], [1, 5], [3, 1]]], ['edgelist', 'G570', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [3, 5], [7, 3], [6, 7], [3, 6], [4, 3]]], ['edgelist', 'G571', 7, [[2, 1], [5, 2], [6, 5], [1, 6], [7, 1], [4, 7], [3, 4], [1, 3], [4, 5]]], ['edgelist', 'G572', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 4]]], ['edgelist', 'G573', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 2], [6, 5], [6, 4], [7, 1], [7, 5]]], ['edgelist', 'G574', 7, [[1, 2], [5, 1], [2, 5], [3, 2], [6, 3], [5, 6], [7, 6], [4, 7], [3, 4]]], ['edgelist', 'G575', 7, [[2, 1], [7, 4], [1, 5], [6, 1], [4, 6], [6, 7], [2, 3], [2, 5], [7, 3]]], ['edgelist', 'G576', 7, [[7, 3], [6, 7], [3, 6], [2, 3], 
[1, 4], [5, 1], [2, 5], [4, 2], [4, 5]]], ['edgelist', 'G577', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 2], [6, 7], [7, 1]]], ['edgelist', 'G578', 7, [[1, 5], [2, 1], [3, 2], [4, 3], [1, 4], [3, 5], [6, 5], [7, 6], [4, 7]]], ['edgelist', 'G579', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 3], [7, 2], [6, 7]]], ['edgelist', 'G580', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 4], [6, 5], [7, 2], [7, 6]]], ['edgelist', 'G581', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 5], [7, 3]]], ['edgelist', 'G582', 7, [[1, 5], [4, 1], [5, 4], [7, 2], [6, 7], [2, 6], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G583', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [1, 3]]], ['edgelist', 'G584', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 5]]], ['edgelist', 'G585', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1]]], ['edgelist', 'G586', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6], [2, 1]]], ['edgelist', 'G587', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 6], [2, 6]]], ['edgelist', 'G588', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1], [3, 5]]], ['edgelist', 'G589', 7, [[2, 1], [5, 2], [1, 5], [6, 1], [5, 6], [4, 5], [2, 4], [6, 2], [3, 4], [2, 3]]], ['edgelist', 'G590', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 3]]], ['edgelist', 'G591', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [5, 2]]], ['edgelist', 'G592', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [6, 3]]], ['edgelist', 'G593', 7, [[1, 2], [3, 5], [1, 3], [5, 6], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4]]], ['edgelist', 'G594', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5]]], ['edgelist', 'G595', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [1, 3], [2, 4], [6, 2]]], ['edgelist', 'G596', 7, [[1, 2], [2, 3], [4, 5], [1, 3], [4, 1], [3, 5], [6, 3], [2, 6], [5, 2], [4, 6]]], ['edgelist', 'G597', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [6, 4], [3, 6], [2, 1]]], ['edgelist', 'G598', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 3], [3, 7]]], ['edgelist', 'G599', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 3], [2, 7]]], ['edgelist', 'G600', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5], [2, 7]]], ['edgelist', 'G601', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5], [1, 7]]], ['edgelist', 'G602', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5], [4, 7]]], ['edgelist', 'G603', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [5, 6], [5, 7]]], ['edgelist', 'G604', 7, [[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [5, 4], [4, 7]]], ['edgelist', 'G605', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 6], [3, 7]]], ['edgelist', 'G606', 7, [[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [5, 4], [3, 7]]], ['edgelist', 'G607', 7, [[3, 4], [2, 3], [5, 2], [6, 5], [3, 6], [1, 3], [5, 1], [1, 2], [6, 1], [7, 6]]], ['edgelist', 'G608', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [1, 5], [6, 7]]], ['edgelist', 'G609', 7, [[3, 1], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [5, 4], [5, 7]]], ['edgelist', 'G610', 7, [[1, 
2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [5, 6], [7, 6]]], ['edgelist', 'G611', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 7]]], ['edgelist', 'G612', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6], [7, 6]]], ['edgelist', 'G613', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1], [1, 7]]], ['edgelist', 'G614', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1], [3, 7]]], ['edgelist', 'G615', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6], [2, 7]]], ['edgelist', 'G616', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1], [4, 7]]], ['edgelist', 'G617', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [1, 5], [2, 1], [5, 2], [1, 7]]], ['edgelist', 'G618', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2], [2, 7]]], ['edgelist', 'G619', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5], [5, 1], [6, 1], [1, 7]]], ['edgelist', 'G620', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [4, 7]]], ['edgelist', 'G621', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1], [6, 7]]], ['edgelist', 'G622', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1], [4, 7]]], ['edgelist', 'G623', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2], [1, 7]]], ['edgelist', 'G624', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [3, 1], [6, 4], [3, 4], [7, 3]]], ['edgelist', 'G625', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [7, 5], [7, 3]]], ['edgelist', 'G626', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [3, 7]]], ['edgelist', 'G627', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2], [6, 7]]], ['edgelist', 'G628', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [4, 1], [5, 7]]], ['edgelist', 'G629', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [4, 7]]], ['edgelist', 'G630', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5], [5, 1], [6, 1], [3, 7]]], ['edgelist', 'G631', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [3, 1], [6, 7]]], ['edgelist', 'G632', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1], [3, 7]]], ['edgelist', 'G633', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [3, 1], [6, 4], [3, 4], [1, 7]]], ['edgelist', 'G634', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [2, 7]]], ['edgelist', 'G635', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [5, 7]]], ['edgelist', 'G636', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [7, 5], [7, 1]]], ['edgelist', 'G637', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [6, 7]]], ['edgelist', 'G638', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [1, 5], [2, 1], [5, 2], [6, 7]]], ['edgelist', 'G639', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [3, 1], [1, 7]]], ['edgelist', 'G640', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [2, 1], [6, 2], [3, 7]]], ['edgelist', 'G641', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3], [3, 7]]], ['edgelist', 'G642', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4], [6, 7]]], ['edgelist', 'G643', 7, [[1, 2], [2, 3], [3, 4], [4, 
5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2], [1, 7]]], ['edgelist', 'G644', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [6, 5], [5, 1], [6, 1], [6, 7]]], ['edgelist', 'G645', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [7, 6], [7, 5]]], ['edgelist', 'G646', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1], [2, 7]]], ['edgelist', 'G647', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [5, 7]]], ['edgelist', 'G648', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4], [5, 7]]], ['edgelist', 'G649', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [3, 7]]], ['edgelist', 'G650', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4], [1, 7]]], ['edgelist', 'G651', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3], [6, 7]]], ['edgelist', 'G652', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2], [2, 7]]], ['edgelist', 'G653', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [4, 7]]], ['edgelist', 'G654', 7, [[5, 4], [5, 2], [2, 3], [6, 5], [3, 6], [4, 2], [6, 4], [4, 3], [7, 1], [7, 3]]], ['edgelist', 'G655', 7, [[2, 1], [3, 2], [4, 3], [5, 4], [6, 5], [2, 6], [7, 2], [5, 7], [3, 7], [6, 3]]], ['edgelist', 'G656', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3], [1, 7]]], ['edgelist', 'G657', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2], [4, 7]]], ['edgelist', 'G658', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2], [3, 7]]], ['edgelist', 'G659', 7, [[1, 2], [3, 6], [1, 3], [4, 1], [4, 2], [4, 3], [3, 2], [6, 2], [7, 6], [7, 5]]], ['edgelist', 'G660', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [2, 7]]], ['edgelist', 'G661', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 7]]], ['edgelist', 'G662', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [3, 1], [2, 7]]], ['edgelist', 'G663', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4], [2, 7]]], ['edgelist', 'G664', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [5, 3], [6, 3], [2, 7]]], ['edgelist', 'G665', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 1], [6, 3], [6, 1], [6, 2], [5, 7]]], ['edgelist', 'G666', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3], [5, 7]]], ['edgelist', 'G667', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [6, 3], [5, 2], [1, 7]]], ['edgelist', 'G668', 7, [[5, 1], [2, 5], [4, 2], [3, 4], [2, 3], [7, 2], [1, 7], [6, 1], [2, 6], [1, 2]]], ['edgelist', 'G669', 7, [[4, 3], [7, 4], [6, 7], [1, 6], [3, 1], [6, 3], [2, 6], [3, 2], [5, 3], [6, 5]]], ['edgelist', 'G670', 7, [[3, 1], [2, 3], [4, 2], [1, 4], [7, 1], [2, 7], [6, 2], [1, 6], [5, 1], [2, 5]]], ['edgelist', 'G671', 7, [[7, 5], [2, 3], [7, 6], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 3], [7, 4]]], ['edgelist', 'G672', 7, [[1, 2], [7, 6], [3, 4], [4, 5], [7, 5], [1, 6], [7, 1], [7, 2], [7, 3], [7, 4]]], ['edgelist', 'G673', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [1, 6], [6, 3], [7, 2], [3, 7]]], ['edgelist', 'G674', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 1], [6, 3], [7, 3], [1, 7]]], ['edgelist', 'G675', 7, [[7, 5], [2, 3], [7, 6], [4, 5], [5, 6], [1, 6], [7, 4], [7, 2], [7, 3], [1, 5]]], ['edgelist', 'G676', 7, [[2, 1], [3, 2], [1, 3], [4, 3], [5, 4], [3, 5], [6, 
3], [5, 6], [7, 5], [2, 7]]], ['edgelist', 'G677', 7, [[1, 2], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 6], [3, 7]]], ['edgelist', 'G678', 7, [[1, 3], [6, 1], [5, 6], [3, 5], [2, 3], [6, 2], [7, 6], [4, 7], [3, 4], [3, 7]]], ['edgelist', 'G679', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 2], [1, 6], [7, 1], [3, 7]]], ['edgelist', 'G680', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 3], [7, 5], [1, 3], [5, 1]]], ['edgelist', 'G681', 7, [[1, 5], [4, 1], [3, 4], [6, 3], [7, 6], [3, 7], [5, 3], [2, 5], [4, 2], [5, 4]]], ['edgelist', 'G682', 7, [[2, 7], [3, 2], [1, 3], [2, 1], [5, 2], [4, 5], [3, 4], [6, 7], [5, 6], [4, 2]]], ['edgelist', 'G683', 7, [[7, 6], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 3], [7, 5]]], ['edgelist', 'G684', 7, [[1, 2], [7, 6], [3, 4], [4, 5], [7, 5], [1, 6], [7, 1], [7, 2], [7, 3], [6, 4]]], ['edgelist', 'G685', 7, [[1, 2], [2, 3], [3, 4], [6, 5], [1, 5], [6, 1], [6, 4], [6, 3], [7, 6], [7, 2]]], ['edgelist', 'G686', 7, [[1, 4], [3, 1], [2, 3], [4, 2], [5, 4], [3, 5], [1, 5], [7, 1], [6, 7], [1, 6]]], ['edgelist', 'G687', 7, [[1, 4], [3, 1], [2, 3], [4, 2], [5, 4], [1, 6], [1, 5], [7, 1], [6, 7], [2, 5]]], ['edgelist', 'G688', 7, [[1, 2], [7, 6], [3, 4], [4, 5], [7, 5], [1, 6], [7, 1], [7, 2], [7, 3], [5, 3]]], ['edgelist', 'G689', 7, [[2, 3], [6, 2], [7, 6], [3, 7], [2, 7], [6, 3], [5, 2], [1, 5], [4, 1], [2, 4]]], ['edgelist', 'G690', 7, [[5, 3], [7, 3], [6, 4], [5, 2], [3, 1], [7, 4], [6, 3], [1, 2], [1, 5], [7, 1]]], ['edgelist', 'G691', 7, [[5, 3], [4, 7], [6, 4], [6, 2], [3, 1], [7, 1], [6, 3], [2, 5], [1, 5], [6, 5]]], ['edgelist', 'G692', 7, [[5, 1], [6, 5], [5, 2], [3, 2], [4, 3], [1, 4], [4, 5], [6, 4], [7, 2], [7, 6]]], ['edgelist', 'G693', 7, [[1, 5], [2, 1], [3, 2], [5, 3], [4, 5], [6, 4], [5, 6], [6, 3], [7, 4], [3, 7]]], ['edgelist', 'G694', 7, [[2, 7], [3, 2], [1, 3], [2, 1], [5, 2], [4, 5], [3, 4], [6, 7], [5, 6], [5, 7]]], ['edgelist', 'G695', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 4], [7, 2], [7, 6], [6, 2]]], ['edgelist', 'G696', 7, [[2, 1], [5, 2], [1, 5], [3, 1], [4, 3], [7, 4], [6, 7], [1, 6], [6, 3], [7, 3]]], ['edgelist', 'G697', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 4], [6, 2], [6, 5], [7, 2], [6, 7]]], ['edgelist', 'G698', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [5, 2], [7, 2], [7, 6]]], ['edgelist', 'G699', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 4], [3, 6], [7, 2], [5, 7]]], ['edgelist', 'G700', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 3], [6, 5], [7, 6], [7, 1], [1, 3]]], ['edgelist', 'G701', 7, [[3, 1], [6, 3], [2, 6], [1, 2], [4, 1], [6, 4], [7, 6], [5, 7], [1, 5], [5, 4]]], ['edgelist', 'G702', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 3], [2, 6], [7, 3], [7, 6]]], ['edgelist', 'G703', 7, [[6, 1], [7, 6], [3, 7], [4, 3], [1, 4], [5, 1], [3, 5], [5, 4], [2, 5], [4, 2]]], ['edgelist', 'G704', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [7, 4], [6, 7], [4, 6], [5, 6], [5, 7]]], ['edgelist', 'G705', 7, [[6, 3], [3, 2], [4, 3], [5, 4], [2, 5], [6, 1], [7, 2], [7, 1], [2, 6], [3, 7]]], ['edgelist', 'G706', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 3], [7, 5], [5, 3], [6, 2]]], ['edgelist', 'G707', 7, [[5, 3], [3, 4], [5, 2], [1, 2], [4, 1], [7, 5], [1, 7], [6, 1], [5, 6], [2, 6]]], ['edgelist', 'G708', 7, [[3, 2], [6, 3], [4, 6], [1, 4], [5, 1], [7, 5], [4, 7], [2, 4], [5, 2], [6, 5]]], ['edgelist', 'G709', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [7, 6], [7, 
4]]], ['edgelist', 'G710', 7, [[1, 2], [5, 1], [2, 5], [3, 2], [6, 3], [5, 6], [7, 6], [4, 7], [3, 4], [6, 4]]], ['edgelist', 'G711', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 6], [7, 2], [7, 3], [5, 3]]], ['edgelist', 'G712', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 4], [6, 3], [7, 6], [7, 5]]], ['edgelist', 'G713', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 4], [7, 3], [5, 1]]], ['edgelist', 'G714', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 5], [7, 6], [7, 4]]], ['edgelist', 'G715', 7, [[1, 6], [7, 1], [2, 7], [1, 2], [2, 6], [3, 2], [4, 3], [5, 4], [7, 5], [5, 6]]], ['edgelist', 'G716', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 6], [7, 5], [3, 1]]], ['edgelist', 'G717', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 6], [7, 4]]], ['edgelist', 'G718', 7, [[3, 2], [3, 1], [4, 3], [5, 4], [2, 5], [6, 2], [6, 1], [7, 1], [2, 7], [7, 6]]], ['edgelist', 'G719', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 4], [6, 2], [7, 2], [7, 5], [7, 6]]], ['edgelist', 'G720', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [6, 3], [5, 2], [7, 1], [6, 7]]], ['edgelist', 'G721', 7, [[4, 2], [1, 4], [6, 1], [2, 6], [3, 2], [7, 3], [1, 7], [1, 5], [5, 3], [5, 7]]], ['edgelist', 'G722', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 5], [7, 2], [7, 3], [7, 6]]], ['edgelist', 'G723', 7, [[1, 4], [3, 1], [2, 3], [4, 2], [5, 4], [3, 5], [6, 5], [6, 1], [7, 5], [7, 2]]], ['edgelist', 'G724', 7, [[1, 2], [7, 6], [3, 4], [4, 5], [7, 5], [1, 6], [7, 3], [7, 2], [5, 3], [6, 2]]], ['edgelist', 'G725', 7, [[6, 3], [7, 6], [3, 7], [5, 3], [1, 5], [4, 1], [3, 4], [2, 1], [2, 4], [5, 2]]], ['edgelist', 'G726', 7, [[4, 5], [2, 4], [5, 2], [1, 5], [4, 1], [2, 1], [3, 2], [6, 3], [7, 6], [3, 7]]], ['edgelist', 'G727', 7, [[6, 7], [3, 6], [7, 3], [4, 7], [1, 4], [5, 1], [6, 5], [2, 5], [4, 2], [3, 2]]], ['edgelist', 'G728', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 6], [5, 3]]], ['edgelist', 'G729', 7, [[2, 1], [3, 2], [4, 3], [1, 4], [6, 1], [2, 6], [5, 6], [7, 5], [4, 7], [3, 7]]], ['edgelist', 'G730', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 2], [3, 6], [7, 1], [4, 7]]], ['edgelist', 'G731', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [1, 3], [2, 6]]], ['edgelist', 'G732', 7, [[1, 2], [3, 5], [1, 3], [3, 2], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4]]], ['edgelist', 'G733', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1], [5, 6]]], ['edgelist', 'G734', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [1, 3]]], ['edgelist', 'G735', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [6, 2]]], ['edgelist', 'G736', 7, [[2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G737', 7, [[4, 7], [2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [3, 7], [6, 2], [1, 4], [2, 7], [1, 2]]], ['edgelist', 'G738', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3]]], ['edgelist', 'G739', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [1, 4]]], ['edgelist', 'G740', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 5], [7, 5]]], ['edgelist', 'G741', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [7, 5]]], ['edgelist', 'G742', 7, [[1, 2], [2, 
3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 3], [5, 7]]], ['edgelist', 'G743', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [3, 6], [7, 3]]], ['edgelist', 'G744', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 3], [1, 7]]], ['edgelist', 'G745', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [7, 6]]], ['edgelist', 'G746', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6], [2, 1], [5, 7]]], ['edgelist', 'G747', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 6], [2, 6], [2, 7]]], ['edgelist', 'G748', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 6], [2, 6], [7, 5]]], ['edgelist', 'G749', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [5, 6], [2, 1], [2, 7]]], ['edgelist', 'G750', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1], [3, 5], [3, 7]]], ['edgelist', 'G751', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 6], [2, 6], [6, 7]]], ['edgelist', 'G752', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 3], [3, 7]]], ['edgelist', 'G753', 7, [[2, 1], [5, 2], [1, 5], [6, 1], [5, 6], [4, 5], [2, 4], [6, 2], [3, 4], [2, 3], [7, 2]]], ['edgelist', 'G754', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 3], [4, 7]]], ['edgelist', 'G755', 7, [[2, 1], [5, 2], [1, 5], [6, 1], [5, 6], [4, 5], [2, 4], [6, 2], [3, 4], [2, 3], [7, 5]]], ['edgelist', 'G756', 7, [[1, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G757', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [6, 3], [1, 7]]], ['edgelist', 'G758', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 3], [1, 7]]], ['edgelist', 'G759', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [5, 2], [2, 7]]], ['edgelist', 'G760', 7, [[2, 1], [5, 2], [1, 5], [6, 1], [5, 6], [4, 5], [2, 4], [6, 2], [3, 4], [2, 3], [6, 7]]], ['edgelist', 'G761', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [6, 5], [4, 6], [2, 6], [1, 7]]], ['edgelist', 'G762', 7, [[1, 2], [3, 5], [1, 3], [5, 6], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [3, 7]]], ['edgelist', 'G763', 7, [[2, 1], [5, 2], [1, 5], [6, 1], [5, 6], [4, 5], [2, 4], [6, 2], [3, 4], [2, 3], [4, 7]]], ['edgelist', 'G764', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [5, 2], [3, 7]]], ['edgelist', 'G765', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 3], [6, 7]]], ['edgelist', 'G766', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [6, 3], [6, 7]]], ['edgelist', 'G767', 7, [[1, 2], [3, 5], [1, 3], [5, 6], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [6, 7]]], ['edgelist', 'G768', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 5], [6, 7]]], ['edgelist', 'G769', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [6, 3], [2, 7]]], ['edgelist', 'G770', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [5, 2], [5, 7]]], ['edgelist', 'G771', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [5, 2], [6, 7]]], ['edgelist', 'G772', 7, [[1, 2], [3, 5], [1, 3], [5, 6], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [5, 7]]], ['edgelist', 'G773', 7, 
[[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [5, 1], [3, 5], [2, 7]]], ['edgelist', 'G774', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [7, 6], [7, 3]]], ['edgelist', 'G775', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5], [6, 7]]], ['edgelist', 'G776', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [1, 3], [2, 4], [6, 2], [2, 7]]], ['edgelist', 'G777', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5], [2, 7]]], ['edgelist', 'G778', 7, [[2, 1], [5, 2], [1, 5], [6, 1], [5, 6], [4, 5], [2, 4], [6, 2], [3, 4], [2, 3], [3, 7]]], ['edgelist', 'G779', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [4, 3], [1, 4], [3, 5], [6, 3], [2, 7]]], ['edgelist', 'G780', 7, [[1, 7], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [6, 7]]], ['edgelist', 'G781', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [6, 4], [3, 6], [2, 1], [2, 7]]], ['edgelist', 'G782', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [1, 3], [2, 4], [6, 2], [6, 7]]], ['edgelist', 'G783', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [1, 3], [2, 4], [6, 2], [7, 4]]], ['edgelist', 'G784', 7, [[5, 4], [1, 5], [2, 1], [3, 2], [4, 3], [1, 6], [6, 4], [1, 4], [2, 6], [6, 3], [5, 7]]], ['edgelist', 'G785', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 6], [1, 6], [3, 1], [6, 2], [5, 2], [7, 4]]], ['edgelist', 'G786', 7, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [4, 3], [1, 4], [2, 7]]], ['edgelist', 'G787', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [6, 4], [3, 6], [2, 1], [7, 3]]], ['edgelist', 'G788', 7, [[1, 2], [3, 5], [1, 3], [5, 6], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 7]]], ['edgelist', 'G789', 7, [[1, 7], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6]]], ['edgelist', 'G790', 7, [[7, 6], [1, 7], [6, 1], [2, 6], [7, 2], [3, 7], [6, 3], [4, 6], [7, 4], [5, 7], [6, 5]]], ['edgelist', 'G791', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 2], [3, 6], [7, 3], [2, 7], [4, 2]]], ['edgelist', 'G792', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 2], [4, 6], [7, 2], [5, 7], [2, 5], [4, 2]]], ['edgelist', 'G793', 7, [[2, 5], [3, 4], [5, 3], [1, 7], [5, 6], [7, 6], [4, 2], [7, 5], [4, 1], [4, 7], [5, 4]]], ['edgelist', 'G794', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 3], [7, 5], [3, 7], [6, 3], [1, 3]]], ['edgelist', 'G795', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [5, 1], [4, 1], [3, 1], [7, 1], [4, 7]]], ['edgelist', 'G796', 7, [[1, 2], [3, 1], [6, 3], [7, 6], [3, 7], [2, 3], [5, 2], [3, 5], [4, 3], [5, 4], [4, 2]]], ['edgelist', 'G797', 7, [[5, 6], [2, 5], [3, 2], [4, 3], [7, 4], [6, 7], [3, 6], [5, 3], [4, 6], [1, 6], [3, 1]]], ['edgelist', 'G798', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 3], [6, 3], [6, 1], [7, 3], [5, 7], [6, 5]]], ['edgelist', 'G799', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [7, 2], [3, 7], [1, 3], [7, 1], [6, 3], [1, 6]]], ['edgelist', 'G800', 7, [[1, 6], [7, 1], [2, 7], [6, 2], [3, 6], [7, 3], [5, 4], [4, 3], [5, 6], [7, 5], [7, 6]]], ['edgelist', 'G801', 7, [[1, 6], [7, 1], [2, 7], [6, 2], [3, 6], [7, 3], [4, 7], [6, 4], [5, 6], [7, 5], [5, 4]]], ['edgelist', 'G802', 7, [[1, 6], [1, 7], [2, 3], [2, 7], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G803', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [1, 3], [3, 5], [6, 3], [5, 6], [7, 6], [7, 1]]], ['edgelist', 
'G804', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7], [1, 7], [5, 3], [1, 5], [3, 1], [7, 5]]], ['edgelist', 'G805', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [6, 2], [6, 3], [7, 2], [3, 7], [5, 3], [6, 5]]], ['edgelist', 'G806', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [6, 2], [3, 6], [5, 3], [7, 3], [5, 7]]], ['edgelist', 'G807', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 5], [7, 3], [1, 3], [5, 1]]], ['edgelist', 'G808', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [4, 2], [6, 4], [5, 6], [2, 5], [7, 6], [7, 2]]], ['edgelist', 'G809', 7, [[1, 5], [4, 1], [5, 4], [3, 5], [4, 3], [2, 4], [3, 2], [5, 2], [6, 3], [7, 6], [3, 7]]], ['edgelist', 'G810', 7, [[1, 6], [1, 7], [2, 5], [2, 7], [3, 4], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G811', 7, [[1, 2], [5, 1], [6, 5], [7, 6], [4, 7], [3, 4], [2, 3], [5, 2], [3, 5], [6, 3], [2, 6]]], ['edgelist', 'G812', 7, [[1, 5], [4, 1], [5, 4], [3, 5], [7, 3], [2, 7], [6, 2], [3, 6], [4, 3], [2, 4], [5, 2]]], ['edgelist', 'G813', 7, [[1, 2], [7, 6], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 3], [7, 4], [7, 5]]], ['edgelist', 'G814', 7, [[5, 2], [1, 5], [2, 1], [4, 2], [1, 4], [6, 2], [7, 6], [2, 7], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G815', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 5], [7, 6], [5, 7]]], ['edgelist', 'G816', 7, [[2, 1], [3, 2], [4, 3], [5, 4], [1, 5], [3, 1], [6, 3], [7, 6], [4, 7], [7, 1], [1, 6]]], ['edgelist', 'G817', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 3], [5, 1], [7, 5], [1, 7], [4, 7]]], ['edgelist', 'G818', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [3, 1], [6, 3], [7, 6], [5, 7], [1, 6], [7, 1]]], ['edgelist', 'G819', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [3, 7], [4, 7], [1, 4], [5, 1]]], ['edgelist', 'G820', 7, [[5, 7], [6, 5], [7, 6], [4, 7], [6, 4], [3, 6], [4, 3], [6, 1], [7, 1], [2, 1], [3, 2]]], ['edgelist', 'G821', 7, [[3, 1], [5, 3], [6, 5], [4, 6], [2, 4], [1, 2], [3, 2], [4, 3], [7, 4], [6, 7], [5, 4]]], ['edgelist', 'G822', 7, [[5, 4], [5, 3], [2, 5], [4, 2], [1, 4], [2, 1], [4, 3], [4, 6], [3, 6], [7, 1], [5, 7]]], ['edgelist', 'G823', 7, [[1, 2], [1, 3], [3, 4], [6, 2], [2, 4], [6, 3], [7, 4], [7, 1], [6, 4], [5, 6], [4, 5]]], ['edgelist', 'G824', 7, [[5, 1], [2, 5], [7, 2], [1, 7], [4, 1], [2, 4], [6, 2], [1, 6], [7, 6], [3, 4], [1, 3]]], ['edgelist', 'G825', 7, [[1, 2], [6, 1], [5, 6], [2, 5], [3, 2], [4, 3], [5, 4], [5, 3], [7, 5], [3, 7], [4, 7]]], ['edgelist', 'G826', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [6, 2], [4, 6], [7, 4], [5, 7], [6, 7]]], ['edgelist', 'G827', 7, [[7, 4], [6, 7], [3, 6], [4, 3], [6, 4], [5, 6], [3, 5], [2, 3], [6, 2], [1, 2], [5, 1]]], ['edgelist', 'G828', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 2], [7, 6], [5, 7]]], ['edgelist', 'G829', 7, [[1, 5], [4, 1], [3, 4], [6, 3], [7, 6], [3, 7], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2]]], ['edgelist', 'G830', 7, [[6, 1], [1, 2], [4, 1], [6, 4], [3, 6], [7, 3], [5, 7], [6, 5], [2, 6], [7, 2], [4, 7]]], ['edgelist', 'G831', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [3, 2], [6, 2], [3, 6], [7, 5], [7, 3], [4, 7]]], ['edgelist', 'G832', 7, [[4, 3], [7, 4], [6, 7], [1, 6], [3, 1], [2, 3], [1, 2], [6, 2], [3, 6], [5, 3], [7, 5]]], ['edgelist', 'G833', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 2], [6, 4], [6, 2], [7, 5], [7, 6], [4, 7]]], ['edgelist', 'G834', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 4], [7, 1], [7, 3], [7, 4], [6, 7]]], 
['edgelist', 'G835', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [6, 2], [7, 6], [5, 7], [4, 7], [2, 7]]], ['edgelist', 'G836', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [6, 2], [7, 6], [2, 7], [3, 7], [5, 3]]], ['edgelist', 'G837', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 4], [5, 3], [7, 2], [7, 5], [6, 3], [4, 6]]], ['edgelist', 'G838', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [6, 2], [7, 2], [5, 7], [7, 6], [3, 7]]], ['edgelist', 'G839', 7, [[1, 4], [1, 7], [2, 3], [2, 6], [3, 5], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G840', 7, [[6, 2], [7, 6], [5, 7], [4, 5], [3, 4], [1, 3], [2, 1], [6, 1], [7, 1], [3, 7], [5, 3]]], ['edgelist', 'G841', 7, [[2, 1], [3, 2], [4, 3], [5, 4], [1, 5], [6, 3], [4, 6], [7, 1], [7, 6], [7, 3], [4, 7]]], ['edgelist', 'G842', 7, [[1, 4], [5, 1], [3, 5], [4, 3], [2, 4], [5, 2], [6, 2], [7, 1], [7, 2], [6, 4], [5, 6]]], ['edgelist', 'G843', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [3, 4], [3, 6], [3, 7], [4, 5], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G844', 7, [[1, 3], [2, 1], [3, 2], [1, 4], [4, 2], [6, 5], [6, 4], [7, 5], [7, 3], [7, 1], [2, 7]]], ['edgelist', 'G845', 7, [[5, 2], [6, 5], [3, 6], [2, 3], [1, 2], [6, 1], [7, 6], [4, 7], [3, 4], [1, 3], [5, 1]]], ['edgelist', 'G846', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [7, 2], [7, 3], [6, 2], [4, 6], [6, 3], [5, 6]]], ['edgelist', 'G847', 7, [[1, 6], [1, 7], [2, 5], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6]]], ['edgelist', 'G848', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 4], [7, 5], [7, 3], [3, 1], [5, 1]]], ['edgelist', 'G849', 7, [[1, 3], [2, 1], [3, 2], [1, 4], [4, 2], [6, 5], [6, 4], [7, 5], [7, 3], [5, 1], [2, 5]]], ['edgelist', 'G850', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [7, 3], [1, 7], [2, 7]]], ['edgelist', 'G851', 7, [[1, 4], [5, 1], [2, 5], [4, 2], [5, 4], [1, 2], [3, 5], [7, 3], [6, 7], [3, 6], [2, 3]]], ['edgelist', 'G852', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4], [7, 2], [7, 6]]], ['edgelist', 'G853', 7, [[5, 2], [6, 5], [3, 6], [2, 3], [1, 2], [5, 1], [6, 1], [7, 6], [4, 7], [3, 4], [6, 4]]], ['edgelist', 'G854', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 4], [5, 3], [6, 2], [6, 5], [7, 6], [5, 7]]], ['edgelist', 'G855', 7, [[1, 2], [2, 3], [3, 4], [6, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [7, 4], [7, 5]]], ['edgelist', 'G856', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [4, 5], [6, 2], [7, 6], [2, 7], [3, 2], [6, 3], [7, 3]]], ['edgelist', 'G857', 7, [[5, 2], [1, 5], [4, 1], [3, 6], [6, 5], [7, 6], [3, 7], [2, 3], [4, 2], [7, 4], [3, 4]]], ['edgelist', 'G858', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 3], [6, 5], [6, 4], [7, 1], [7, 6], [4, 7]]], ['edgelist', 'G859', 7, [[6, 3], [3, 5], [6, 4], [5, 2], [6, 5], [1, 2], [4, 1], [1, 3], [7, 3], [7, 4], [1, 7]]], ['edgelist', 'G860', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 5], [6, 3], [6, 4], [7, 2], [1, 7]]], ['edgelist', 'G861', 7, [[1, 4], [1, 5], [2, 3], [2, 6], [2, 7], [3, 5], [3, 7], [4, 6], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G862', 7, [[5, 1], [4, 5], [6, 4], [1, 6], [2, 1], [3, 2], [4, 3], [5, 2], [6, 3], [7, 5], [6, 7]]], ['edgelist', 'G863', 7, [[3, 4], [5, 3], [1, 5], [6, 1], [2, 6], [5, 2], [4, 5], [6, 4], [2, 1], [7, 6], [7, 3]]], ['edgelist', 'G864', 7, [[5, 2], [1, 5], [4, 1], [5, 4], [6, 5], [7, 6], [3, 7], [2, 3], [4, 2], [7, 4], [3, 6]]], ['edgelist', 'G865', 7, [[1, 4], [5, 1], [3, 5], [4, 3], [2, 4], [1, 2], [7, 1], [6, 7], [3, 6], [2, 6], 
[5, 2]]], ['edgelist', 'G866', 7, [[1, 4], [1, 5], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G867', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 4], [5, 3], [6, 2], [6, 5], [7, 2], [6, 7]]], ['edgelist', 'G868', 7, [[5, 2], [6, 5], [7, 6], [4, 7], [3, 4], [2, 3], [1, 2], [6, 1], [5, 1], [6, 3], [7, 3]]], ['edgelist', 'G869', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [5, 3], [4, 6], [5, 6], [4, 1], [7, 6], [7, 2]]], ['edgelist', 'G870', 7, [[1, 5], [2, 1], [5, 2], [4, 5], [3, 4], [2, 3], [7, 2], [6, 7], [4, 6], [6, 5], [3, 7]]], ['edgelist', 'G871', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [6, 2], [5, 3], [7, 3], [4, 7], [5, 7]]], ['edgelist', 'G872', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 3], [2, 7], [6, 3], [5, 2], [1, 4]]], ['edgelist', 'G873', 7, [[1, 4], [1, 5], [2, 3], [2, 6], [2, 7], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G874', 7, [[1, 2], [2, 3], [3, 4], [6, 5], [1, 5], [6, 4], [6, 2], [7, 4], [7, 5], [5, 3], [1, 4]]], ['edgelist', 'G875', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 5], [3, 7], [4, 7], [5, 6]]], ['edgelist', 'G876', 7, [[5, 4], [3, 5], [4, 3], [1, 4], [3, 2], [6, 5], [6, 1], [7, 5], [7, 2], [2, 6], [1, 7]]], ['edgelist', 'G877', 7, [[7, 5], [4, 7], [2, 4], [5, 2], [1, 5], [3, 1], [4, 3], [1, 2], [6, 1], [7, 6], [6, 3]]], ['edgelist', 'G878', 7, [[7, 2], [3, 7], [2, 3], [1, 2], [4, 1], [5, 4], [6, 5], [4, 6], [3, 1], [5, 1], [6, 7]]], ['edgelist', 'G879', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [5, 4], [1, 3]]], ['edgelist', 'G880', 7, [[4, 7], [2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [3, 7], [6, 2], [1, 4], [2, 7], [1, 2], [1, 7]]], ['edgelist', 'G881', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [6, 2], [3, 5]]], ['edgelist', 'G882', 7, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2], [3, 4]]], ['edgelist', 'G883', 7, [[1, 2], [2, 3], [1, 3], [4, 3], [4, 2], [5, 1], [3, 5], [6, 2], [1, 6], [5, 6], [4, 5], [6, 4]]], ['edgelist', 'G884', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [1, 3], [2, 6], [7, 2]]], ['edgelist', 'G885', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [1, 3], [5, 7], [6, 4]]], ['edgelist', 'G886', 7, [[1, 2], [3, 5], [1, 3], [3, 2], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4], [2, 7]]], ['edgelist', 'G887', 7, [[1, 2], [3, 5], [1, 3], [3, 2], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4], [4, 7]]], ['edgelist', 'G888', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1], [5, 6], [5, 7]]], ['edgelist', 'G889', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1], [5, 6], [7, 2]]], ['edgelist', 'G890', 7, [[1, 2], [3, 5], [1, 3], [3, 2], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4], [1, 7]]], ['edgelist', 'G891', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1], [5, 6], [1, 7]]], ['edgelist', 'G892', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1], [5, 6], [3, 7]]], ['edgelist', 'G893', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [1, 3], [2, 7]]], ['edgelist', 'G894', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [1, 3], [5, 7]]], ['edgelist', 'G895', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], 
[3, 5], [1, 4], [1, 3], [7, 2], [7, 6]]], ['edgelist', 'G896', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 2], [6, 4], [3, 6], [2, 1], [5, 2], [2, 7]]], ['edgelist', 'G897', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [4, 6], [1, 4], [6, 7]]], ['edgelist', 'G898', 7, [[4, 7], [2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [3, 7], [6, 2], [1, 4], [2, 7], [1, 2], [2, 5]]], ['edgelist', 'G899', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [4, 6], [1, 4], [4, 7]]], ['edgelist', 'G900', 7, [[1, 2], [3, 5], [1, 3], [3, 2], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4], [5, 7]]], ['edgelist', 'G901', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 2], [6, 4], [3, 6], [2, 1], [5, 2], [3, 7]]], ['edgelist', 'G902', 7, [[4, 7], [2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [3, 7], [6, 2], [1, 4], [2, 7], [1, 2], [1, 5]]], ['edgelist', 'G903', 7, [[2, 4], [5, 2], [4, 5], [3, 4], [1, 3], [5, 1], [6, 5], [3, 6], [5, 3], [1, 6], [2, 6], [4, 7]]], ['edgelist', 'G904', 7, [[2, 4], [5, 2], [4, 5], [3, 4], [1, 3], [5, 1], [6, 5], [3, 6], [5, 3], [1, 6], [2, 6], [1, 7]]], ['edgelist', 'G905', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 1], [5, 6], [6, 7]]], ['edgelist', 'G906', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [1, 4], [6, 7]]], ['edgelist', 'G907', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3], [5, 7]]], ['edgelist', 'G908', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3], [1, 7]]], ['edgelist', 'G909', 7, [[4, 7], [2, 4], [3, 2], [1, 3], [6, 1], [7, 6], [3, 7], [6, 2], [1, 4], [2, 7], [1, 2], [5, 6]]], ['edgelist', 'G910', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3], [4, 7]]], ['edgelist', 'G911', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [1, 4], [1, 7]]], ['edgelist', 'G912', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [1, 3], [6, 7]]], ['edgelist', 'G913', 7, [[1, 4], [7, 1], [6, 7], [4, 6], [2, 4], [7, 2], [5, 7], [4, 5], [3, 4], [7, 3], [4, 7], [6, 5]]], ['edgelist', 'G914', 7, [[1, 2], [5, 1], [6, 5], [2, 6], [5, 2], [3, 5], [2, 3], [7, 2], [5, 7], [3, 7], [4, 3], [5, 4]]], ['edgelist', 'G915', 7, [[5, 2], [4, 3], [4, 1], [5, 3], [6, 2], [6, 1], [4, 6], [5, 4], [6, 5], [7, 6], [4, 7], [5, 7]]], ['edgelist', 'G916', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [5, 3], [4, 5], [6, 4], [1, 6], [7, 4], [2, 7]]], ['edgelist', 'G917', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [7, 4], [1, 7], [6, 1], [2, 6], [5, 2], [3, 5]]], ['edgelist', 'G918', 7, [[7, 3], [6, 7], [4, 6], [3, 4], [2, 3], [5, 2], [6, 5], [3, 6], [5, 3], [1, 5], [2, 1], [6, 2]]], ['edgelist', 'G919', 7, [[6, 5], [7, 6], [4, 7], [5, 4], [1, 5], [4, 1], [2, 4], [1, 2], [5, 2], [4, 6], [3, 4], [5, 3]]], ['edgelist', 'G920', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [6, 1], [2, 6], [7, 2], [1, 7]]], ['edgelist', 'G921', 7, [[2, 3], [1, 2], [3, 1], [4, 3], [1, 4], [2, 4], [5, 3], [1, 5], [6, 5], [3, 6], [7, 3], [2, 7]]], ['edgelist', 'G922', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 3], [5, 6], [7, 5], [4, 7]]], ['edgelist', 'G923', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [6, 1], [2, 6], [7, 2], [3, 7]]], ['edgelist', 'G924', 7, [[2, 3], [1, 2], [3, 1], [4, 3], [1, 4], [2, 
4], [5, 3], [1, 5], [7, 5], [3, 7], [6, 3], [5, 6]]], ['edgelist', 'G925', 7, [[2, 1], [3, 2], [1, 3], [4, 1], [5, 4], [1, 5], [6, 1], [4, 6], [5, 6], [7, 5], [4, 7], [7, 1]]], ['edgelist', 'G926', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [5, 2], [1, 5], [6, 1], [3, 6], [7, 6], [3, 7]]], ['edgelist', 'G927', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [7, 5], [1, 7], [6, 1], [4, 6]]], ['edgelist', 'G928', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [2, 6], [7, 2], [1, 7], [6, 1], [5, 6]]], ['edgelist', 'G929', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [7, 2], [1, 7], [6, 1], [3, 6]]], ['edgelist', 'G930', 7, [[6, 5], [4, 6], [5, 4], [7, 5], [4, 7], [3, 4], [5, 3], [1, 5], [4, 1], [2, 1], [3, 2], [7, 3]]], ['edgelist', 'G931', 7, [[5, 2], [4, 3], [4, 1], [5, 3], [6, 2], [6, 1], [4, 6], [5, 4], [6, 5], [7, 6], [1, 7], [4, 7]]], ['edgelist', 'G932', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [7, 2], [1, 7], [6, 1], [2, 6]]], ['edgelist', 'G933', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [4, 2], [6, 2], [7, 6], [5, 7]]], ['edgelist', 'G934', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [6, 5], [4, 6], [7, 4], [5, 7]]], ['edgelist', 'G935', 7, [[2, 1], [3, 2], [4, 3], [1, 4], [5, 4], [2, 5], [5, 1], [6, 5], [1, 6], [4, 6], [7, 1], [2, 7]]], ['edgelist', 'G936', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [7, 3], [5, 7], [6, 1], [2, 6]]], ['edgelist', 'G937', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [6, 5], [3, 6], [2, 6], [7, 2], [6, 7]]], ['edgelist', 'G938', 7, [[1, 3], [2, 1], [3, 2], [1, 4], [4, 2], [5, 3], [6, 4], [7, 2], [7, 5], [5, 1], [4, 5], [2, 6]]], ['edgelist', 'G939', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 3], [5, 4], [5, 2], [1, 5], [6, 3], [6, 5], [7, 1], [4, 7]]], ['edgelist', 'G940', 7, [[6, 1], [3, 6], [7, 3], [4, 7], [3, 4], [2, 3], [1, 2], [5, 1], [2, 5], [6, 2], [7, 6], [1, 3]]], ['edgelist', 'G941', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [7, 5], [3, 7], [6, 3], [4, 6]]], ['edgelist', 'G942', 7, [[1, 3], [2, 1], [6, 2], [4, 6], [7, 4], [3, 7], [5, 3], [4, 5], [6, 5], [3, 6], [2, 3], [5, 2]]], ['edgelist', 'G943', 7, [[1, 3], [2, 1], [3, 2], [1, 4], [4, 2], [5, 1], [2, 5], [5, 3], [4, 5], [6, 5], [7, 6], [4, 7]]], ['edgelist', 'G944', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [4, 2], [4, 3], [7, 2], [3, 7], [5, 7], [4, 5], [6, 4], [7, 6]]], ['edgelist', 'G945', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [6, 1], [7, 6], [1, 7], [4, 5]]], ['edgelist', 'G946', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 2], [3, 6], [7, 1], [4, 7]]], ['edgelist', 'G947', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 4], [6, 2], [6, 5], [7, 4], [5, 7], [2, 7], [7, 6]]], ['edgelist', 'G948', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G949', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 3], [7, 6], [1, 7], [7, 3], [1, 6], [2, 6], [7, 2]]], ['edgelist', 'G950', 7, [[1, 2], [7, 6], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 3], [7, 4], [7, 5], [6, 2]]], ['edgelist', 'G951', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5], [7, 2], [6, 7]]], ['edgelist', 'G952', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 2], [5, 6], [7, 5], [6, 7]]], ['edgelist', 'G953', 7, [[3, 2], [1, 3], [6, 1], [5, 6], 
[4, 5], [6, 2], [6, 4], [3, 6], [2, 1], [5, 2], [7, 4], [7, 2]]], ['edgelist', 'G954', 7, [[1, 5], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G955', 7, [[1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G956', 7, [[1, 2], [3, 5], [1, 3], [3, 2], [5, 7], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4], [7, 2], [3, 7]]], ['edgelist', 'G957', 7, [[1, 2], [2, 3], [3, 4], [6, 5], [1, 5], [6, 4], [6, 2], [7, 4], [7, 5], [5, 3], [1, 4], [5, 4]]], ['edgelist', 'G958', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [7, 2], [7, 6]]], ['edgelist', 'G959', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 2], [7, 6], [1, 7], [2, 7]]], ['edgelist', 'G960', 7, [[1, 4], [5, 1], [3, 5], [4, 3], [2, 4], [5, 2], [2, 1], [6, 2], [6, 3], [7, 2], [3, 7], [5, 7]]], ['edgelist', 'G961', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 3], [6, 1], [6, 5], [5, 2], [2, 7], [6, 4], [2, 6], [7, 3]]], ['edgelist', 'G962', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 3], [6, 1], [6, 5], [5, 2], [2, 6], [6, 4], [7, 2], [5, 7]]], ['edgelist', 'G963', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 3], [6, 1], [6, 5], [5, 2], [2, 6], [6, 4], [7, 2], [1, 7]]], ['edgelist', 'G964', 7, [[5, 4], [2, 3], [1, 2], [1, 4], [5, 1], [7, 5], [5, 3], [6, 5], [7, 3], [7, 4], [4, 3], [6, 2]]], ['edgelist', 'G965', 7, [[3, 4], [5, 3], [1, 5], [7, 1], [7, 6], [5, 6], [2, 4], [6, 2], [1, 6], [7, 2], [4, 7], [6, 4]]], ['edgelist', 'G966', 7, [[1, 4], [1, 6], [2, 3], [2, 6], [2, 7], [3, 5], [3, 7], [4, 5], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G967', 7, [[1, 4], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G968', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [5, 4], [6, 4], [7, 2], [4, 7], [7, 3], [1, 7], [5, 7]]], ['edgelist', 'G969', 7, [[1, 2], [3, 5], [1, 3], [7, 2], [4, 2], [4, 3], [5, 2], [6, 2], [6, 3], [6, 4], [1, 4], [5, 7]]], ['edgelist', 'G970', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [7, 4], [2, 7]]], ['edgelist', 'G971', 7, [[5, 4], [2, 3], [6, 1], [1, 4], [5, 1], [5, 2], [5, 3], [6, 2], [7, 3], [7, 4], [4, 3], [7, 5]]], ['edgelist', 'G972', 7, [[3, 4], [5, 3], [6, 5], [1, 6], [7, 1], [2, 7], [4, 2], [7, 4], [6, 4], [2, 6], [5, 1], [4, 1]]], ['edgelist', 'G973', 7, [[1, 4], [1, 6], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G974', 7, [[4, 3], [2, 3], [6, 1], [1, 4], [5, 1], [5, 2], [7, 5], [6, 2], [7, 3], [7, 4], [7, 2], [1, 7]]], ['edgelist', 'G975', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [2, 7], [3, 4], [3, 5], [3, 7], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G976', 7, [[1, 4], [1, 6], [2, 3], [2, 5], [2, 7], [3, 5], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G977', 7, [[1, 4], [1, 7], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [5, 7], [6, 7]]], ['edgelist', 'G978', 7, [[1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7]]], ['edgelist', 'G979', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 3], [6, 1], [6, 5], [5, 2], [4, 5], [6, 4], [3, 7], [7, 2]]], ['edgelist', 'G980', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 3], [6, 1], [6, 5], [5, 2], [4, 5], [6, 4], [7, 2], [7, 6]]], ['edgelist', 'G981', 7, [[1, 3], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G982', 7, [[1, 6], [1, 7], [2, 
4], [2, 5], [2, 7], [3, 4], [3, 5], [3, 6], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G983', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 2], [3, 5], [6, 3], [1, 6], [5, 4], [7, 6], [7, 5], [4, 6]]], ['edgelist', 'G984', 7, [[1, 3], [1, 7], [2, 3], [2, 5], [2, 6], [3, 4], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G985', 7, [[1, 3], [1, 7], [2, 4], [2, 5], [2, 6], [3, 5], [3, 6], [4, 5], [4, 6], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G986', 7, [[1, 3], [1, 7], [2, 4], [2, 5], [2, 6], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G987', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [2, 7], [3, 4], [3, 5], [3, 7], [4, 5], [4, 6], [5, 6], [6, 7]]], ['edgelist', 'G988', 7, [[1, 6], [1, 7], [2, 3], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G989', 7, [[4, 1], [3, 4], [5, 3], [1, 5], [6, 2], [6, 3], [7, 2], [7, 1], [4, 7], [6, 4], [5, 6], [7, 5]]], ['edgelist', 'G990', 7, [[1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G991', 7, [[1, 2], [1, 3], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G992', 7, [[4, 1], [3, 4], [5, 3], [1, 5], [6, 2], [6, 3], [7, 2], [7, 1], [4, 7], [6, 4], [7, 5], [2, 4]]], ['edgelist', 'G993', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G994', 7, [[3, 4], [5, 3], [6, 3], [5, 2], [7, 1], [4, 1], [4, 2], [7, 4], [6, 7], [2, 6], [5, 1], [4, 5]]], ['edgelist', 'G995', 7, [[3, 4], [5, 3], [5, 2], [3, 6], [7, 1], [7, 5], [4, 2], [7, 4], [1, 4], [2, 6], [5, 1], [6, 4]]], ['edgelist', 'G996', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 7], [3, 4], [3, 7], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G997', 7, [[4, 1], [3, 4], [5, 3], [1, 5], [6, 2], [6, 3], [7, 2], [7, 1], [4, 7], [2, 4], [7, 5], [6, 5]]], ['edgelist', 'G998', 7, [[7, 4], [2, 3], [3, 4], [1, 4], [5, 3], [6, 1], [1, 7], [5, 2], [4, 5], [7, 6], [6, 2], [1, 5]]], ['edgelist', 'G999', 7, [[1, 4], [1, 6], [1, 7], [2, 3], [2, 6], [2, 7], [3, 5], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7]]], ['edgelist', 'G1000', 7, [[1, 4], [1, 6], [1, 7], [2, 3], [2, 5], [2, 7], [3, 4], [3, 6], [4, 5], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1001', 7, [[1, 4], [1, 6], [1, 7], [2, 3], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1002', 7, [[1, 5], [4, 1], [2, 4], [5, 2], [2, 1], [5, 6], [3, 5], [7, 3], [6, 7], [3, 6], [4, 3], [7, 4]]], ['edgelist', 'G1003', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [6, 3], [6, 5], [7, 5], [7, 4], [7, 3], [6, 4]]], ['edgelist', 'G1004', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [5, 6], [5, 7]]], ['edgelist', 'G1005', 7, [[4, 1], [5, 3], [4, 2], [5, 1], [6, 3], [6, 2], [5, 4], [6, 5], [4, 6], [7, 2], [1, 7], [3, 7]]], ['edgelist', 'G1006', 7, [[2, 1], [5, 2], [1, 5], [6, 1], [7, 6], [2, 7], [4, 5], [6, 4], [3, 4], [6, 3], [7, 4], [3, 7]]], ['edgelist', 'G1007', 7, [[1, 2], [3, 1], [3, 4], [4, 5], [1, 5], [1, 6], [7, 2], [5, 7], [7, 6], [3, 7], [4, 2], [6, 4]]], ['edgelist', 'G1008', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [7, 1], [7, 2], [7, 3], [7, 4], [7, 5], [7, 6]]], ['edgelist', 'G1009', 7, [[4, 7], [2, 3], [1, 7], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 2], [3, 6], [5, 6], [7, 5]]], ['edgelist', 'G1010', 7, [[2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 
'G1011', 7, [[2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1012', 7, [[1, 7], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1013', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [5, 4], [1, 3], [7, 5]]], ['edgelist', 'G1014', 7, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2], [1, 5], [2, 7]]], ['edgelist', 'G1015', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1016', 7, [[1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1017', 7, [[1, 4], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1018', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1019', 7, [[1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1020', 7, [[1, 7], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1021', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3], [5, 6], [2, 7]]], ['edgelist', 'G1022', 7, [[1, 2], [2, 3], [3, 4], [5, 6], [1, 5], [2, 4], [5, 2], [3, 5], [1, 4], [6, 4], [5, 4], [1, 3], [6, 7]]], ['edgelist', 'G1023', 7, [[1, 6], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G1024', 7, [[1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1025', 7, [[6, 7], [1, 6], [7, 1], [5, 7], [6, 5], [2, 6], [7, 2], [4, 7], [6, 4], [3, 6], [7, 3], [2, 1], [3, 2]]], ['edgelist', 'G1026', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 7]]], ['edgelist', 'G1027', 7, [[4, 5], [1, 4], [5, 1], [2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [7, 1], [4, 7], [6, 4], [5, 6]]], ['edgelist', 'G1028', 7, [[4, 5], [1, 4], [5, 1], [2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [7, 1], [4, 7], [6, 4], [1, 6]]], ['edgelist', 'G1029', 7, [[4, 5], [1, 4], [5, 1], [2, 5], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [7, 5], [1, 7], [6, 1], [4, 6]]], ['edgelist', 'G1030', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1031', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 6], [5, 7]]], ['edgelist', 'G1032', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [6, 2], [7, 6], [2, 7]]], ['edgelist', 'G1033', 7, [[1, 5], [1, 7], [2, 4], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1034', 7, [[1, 6], [1, 7], [2, 5], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1035', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1036', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [6, 4], [7, 6], [5, 7]]], ['edgelist', 'G1037', 7, [[1, 6], [1, 7], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], 
['edgelist', 'G1038', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [7, 2], [7, 6], [6, 2]]], ['edgelist', 'G1039', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 5], [1, 6], [7, 1], [4, 7], [7, 5]]], ['edgelist', 'G1040', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [7, 2], [6, 2], [3, 7]]], ['edgelist', 'G1041', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [6, 7]]], ['edgelist', 'G1042', 7, [[2, 1], [3, 2], [5, 3], [2, 5], [4, 2], [1, 4], [3, 4], [6, 3], [2, 6], [1, 6], [7, 1], [2, 7], [3, 7]]], ['edgelist', 'G1043', 7, [[3, 6], [7, 3], [6, 7], [5, 6], [4, 5], [1, 4], [5, 1], [2, 5], [4, 2], [7, 4], [3, 2], [5, 3], [4, 3]]], ['edgelist', 'G1044', 7, [[1, 4], [1, 7], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1045', 7, [[3, 5], [4, 3], [2, 4], [5, 2], [1, 5], [4, 1], [7, 4], [2, 7], [6, 2], [5, 6], [7, 5], [4, 6], [2, 3]]], ['edgelist', 'G1046', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 5], [3, 6], [4, 6], [4, 7]]], ['edgelist', 'G1047', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 6], [6, 7]]], ['edgelist', 'G1048', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 4], [3, 6], [4, 7], [5, 6]]], ['edgelist', 'G1049', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 7], [3, 5], [3, 6], [4, 5], [4, 6]]], ['edgelist', 'G1050', 7, [[1, 3], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1051', 7, [[3, 6], [2, 3], [6, 2], [5, 6], [4, 5], [1, 4], [5, 1], [4, 3], [5, 3], [2, 4], [7, 4], [3, 7], [2, 7]]], ['edgelist', 'G1052', 7, [[1, 5], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1053', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [1, 5], [7, 2], [5, 7]]], ['edgelist', 'G1054', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3], [6, 1], [7, 6], [2, 7], [5, 1]]], ['edgelist', 'G1055', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [2, 4], [4, 3], [5, 1], [3, 6], [4, 5], [6, 4], [1, 6], [7, 5], [7, 2]]], ['edgelist', 'G1056', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3], [7, 3], [6, 7], [1, 6], [2, 3]]], ['edgelist', 'G1057', 7, [[6, 5], [7, 3], [7, 5], [5, 4], [6, 1], [4, 2], [4, 3], [7, 4], [6, 7], [5, 1], [2, 5], [6, 2], [1, 4]]], ['edgelist', 'G1058', 7, [[1, 3], [1, 7], [2, 4], [2, 5], [2, 6], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1059', 7, [[2, 6], [5, 2], [1, 5], [6, 1], [3, 6], [5, 3], [4, 5], [6, 4], [1, 2], [3, 1], [4, 3], [7, 6], [7, 5]]], ['edgelist', 'G1060', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [1, 5], [1, 7], [5, 7]]], ['edgelist', 'G1061', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [6, 1], [5, 6], [2, 6], [7, 2], [1, 7], [4, 7]]], ['edgelist', 'G1062', 7, [[1, 6], [1, 7], [2, 3], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1063', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [5, 2], [6, 4], [3, 6], [2, 1], [7, 4], [5, 7], [6, 2]]], ['edgelist', 'G1064', 7, [[6, 3], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [7, 3], [7, 4], [2, 3], [4, 2], 
[5, 1]]], ['edgelist', 'G1065', 7, [[2, 1], [3, 2], [1, 3], [1, 4], [4, 3], [7, 3], [2, 7], [6, 2], [7, 6], [5, 7], [6, 5], [1, 6], [5, 1]]], ['edgelist', 'G1066', 7, [[1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7]]], ['edgelist', 'G1067', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1068', 7, [[1, 2], [2, 3], [5, 2], [4, 2], [1, 5], [3, 4], [1, 4], [3, 1], [6, 1], [7, 6], [5, 7], [4, 6], [5, 3]]], ['edgelist', 'G1069', 7, [[1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G1070', 7, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [1, 5], [3, 4], [7, 6], [1, 7]]], ['edgelist', 'G1071', 7, [[6, 3], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [7, 3], [7, 4], [3, 4], [6, 1], [5, 1]]], ['edgelist', 'G1072', 7, [[1, 2], [2, 3], [3, 4], [6, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [7, 4], [7, 5], [5, 3], [1, 4]]], ['edgelist', 'G1073', 7, [[1, 2], [1, 7], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G1074', 7, [[1, 2], [1, 7], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1075', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [7, 3], [1, 7], [6, 1], [3, 6], [6, 4], [5, 6], [7, 5], [4, 7]]], ['edgelist', 'G1076', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [7, 6], [1, 7], [1, 3], [3, 6], [6, 4], [5, 6], [7, 5], [4, 7]]], ['edgelist', 'G1077', 7, [[4, 5], [1, 4], [5, 1], [4, 7], [4, 2], [3, 4], [5, 3], [2, 1], [3, 2], [6, 3], [4, 6], [7, 3], [6, 7]]], ['edgelist', 'G1078', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5], [7, 6], [4, 7], [5, 7]]], ['edgelist', 'G1079', 7, [[2, 1], [3, 2], [7, 1], [2, 5], [4, 2], [1, 4], [3, 4], [2, 7], [2, 6], [3, 7], [5, 4], [6, 5], [7, 6]]], ['edgelist', 'G1080', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 6], [3, 7], [4, 6], [5, 7]]], ['edgelist', 'G1081', 7, [[1, 7], [2, 3], [3, 4], [1, 4], [5, 3], [6, 1], [7, 4], [5, 2], [4, 5], [7, 6], [2, 6], [4, 6], [2, 4]]], ['edgelist', 'G1082', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [7, 5], [5, 4], [6, 5], [6, 3], [7, 1], [4, 7], [4, 6]]], ['edgelist', 'G1083', 7, [[1, 2], [2, 3], [3, 4], [4, 7], [1, 5], [7, 6], [1, 7], [7, 5], [3, 6], [6, 4], [5, 6], [2, 6], [7, 2]]], ['edgelist', 'G1084', 7, [[1, 5], [1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1085', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1086', 7, [[3, 4], [6, 3], [7, 6], [4, 7], [5, 4], [6, 5], [1, 6], [3, 1], [2, 3], [1, 2], [6, 2], [5, 3], [7, 5]]], ['edgelist', 'G1087', 7, [[3, 2], [1, 6], [7, 1], [5, 7], [6, 5], [2, 6], [7, 2], [4, 7], [6, 4], [3, 6], [7, 3], [2, 1], [4, 5]]], ['edgelist', 'G1088', 7, [[1, 2], [3, 1], [3, 4], [4, 5], [1, 5], [1, 6], [7, 2], [5, 7], [7, 6], [3, 7], [4, 2], [6, 4], [7, 1]]], ['edgelist', 'G1089', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 6], [2, 7], [3, 4], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1090', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [5, 7], [6, 2], [5, 6], [4, 2], [6, 3], [7, 1], [7, 2], [3, 2], [5, 2]]], ['edgelist', 'G1091', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 4], [6, 5], [6, 3], [6, 2], [7, 
6], [2, 7], [3, 7]]], ['edgelist', 'G1092', 7, [[1, 5], [1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7]]], ['edgelist', 'G1093', 7, [[4, 1], [3, 4], [5, 3], [1, 5], [6, 2], [6, 3], [7, 2], [7, 1], [4, 7], [2, 4], [7, 5], [6, 5], [6, 4]]], ['edgelist', 'G1094', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 5], [2, 6], [3, 4], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G1095', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [4, 5], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G1096', 7, [[1, 3], [1, 6], [1, 7], [2, 3], [2, 5], [2, 7], [3, 4], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1097', 7, [[4, 5], [6, 1], [4, 6], [1, 7], [7, 5], [3, 4], [5, 3], [2, 1], [3, 2], [2, 7], [6, 2], [3, 6], [7, 3]]], ['edgelist', 'G1098', 7, [[1, 3], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1099', 7, [[4, 1], [3, 4], [5, 3], [1, 5], [6, 4], [6, 3], [6, 5], [2, 4], [2, 1], [5, 2], [7, 1], [4, 7], [2, 7]]], ['edgelist', 'G1100', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [7, 1], [2, 7], [7, 4], [5, 7], [2, 3], [6, 1]]], ['edgelist', 'G1101', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7]]], ['edgelist', 'G1102', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6]]], ['edgelist', 'G1103', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [6, 4], [6, 5], [7, 5], [7, 3], [7, 6], [6, 3], [4, 7]]], ['edgelist', 'G1104', 7, [[1, 2], [1, 6], [1, 7], [2, 4], [2, 5], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1105', 7, [[1, 2], [1, 6], [1, 7], [2, 4], [2, 5], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1106', 7, [[1, 2], [3, 1], [3, 4], [4, 5], [1, 5], [1, 6], [7, 2], [5, 7], [7, 6], [3, 7], [4, 2], [6, 4], [3, 2]]], ['edgelist', 'G1107', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [2, 4], [3, 1], [5, 1], [6, 4]]], ['edgelist', 'G1108', 7, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2], [3, 4], [1, 5], [2, 7]]], ['edgelist', 'G1109', 7, [[1, 7], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1110', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G1111', 7, [[1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1112', 7, [[1, 4], [2, 3], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1113', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [4, 7], [6, 4], [5, 6], [7, 5]]], ['edgelist', 'G1114', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [7, 3], [2, 7], [6, 2], [1, 6]]], ['edgelist', 'G1115', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [6, 3], [4, 6], [7, 5], [1, 7]]], ['edgelist', 'G1116', 7, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [3, 5], [6, 2], [1, 4], [2, 5], [1, 2], [1, 5], [7, 5], [1, 7]]], ['edgelist', 'G1117', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [4, 5], [4, 6], [5, 7]]], ['edgelist', 
'G1118', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 7], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1119', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1120', 7, [[4, 5], [2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [1, 2], [6, 2], [1, 5], [2, 5], [6, 4], [3, 6], [7, 5], [1, 7]]], ['edgelist', 'G1121', 7, [[2, 4], [3, 2], [1, 3], [6, 1], [5, 6], [4, 5], [6, 4], [3, 6], [2, 1], [5, 2], [7, 2], [6, 2], [3, 7], [1, 5]]], ['edgelist', 'G1122', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 7], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1123', 7, [[3, 4], [5, 3], [7, 4], [5, 1], [7, 1], [4, 5], [4, 2], [6, 5], [6, 1], [1, 4], [2, 6], [6, 4], [7, 5], [7, 2]]], ['edgelist', 'G1124', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [6, 4], [7, 6], [5, 7], [6, 5]]], ['edgelist', 'G1125', 7, [[4, 2], [2, 5], [3, 4], [4, 5], [1, 5], [2, 6], [1, 2], [1, 3], [3, 6], [6, 4], [5, 6], [2, 3], [7, 3], [6, 7]]], ['edgelist', 'G1126', 7, [[1, 4], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1127', 7, [[1, 4], [1, 7], [2, 3], [2, 5], [2, 6], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1128', 7, [[1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1129', 7, [[1, 2], [1, 7], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1130', 7, [[1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1131', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [5, 7], [6, 3], [2, 6], [1, 6], [7, 4]]], ['edgelist', 'G1132', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5], [5, 3], [4, 1], [7, 2], [6, 7]]], ['edgelist', 'G1133', 7, [[1, 5], [1, 7], [2, 3], [2, 4], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1134', 7, [[1, 5], [1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1135', 7, [[1, 6], [1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1136', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [2, 3], [6, 3], [5, 1], [4, 2], [6, 1], [7, 6], [7, 5], [1, 2]]], ['edgelist', 'G1137', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [6, 3], [7, 1], [7, 2], [6, 1], [2, 3], [4, 2], [5, 1]]], ['edgelist', 'G1138', 7, [[6, 7], [1, 6], [7, 1], [5, 7], [6, 5], [2, 6], [7, 2], [4, 7], [6, 4], [3, 6], [7, 3], [2, 1], [3, 2], [4, 5]]], ['edgelist', 'G1139', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 6], [2, 7], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1140', 7, [[1, 2], [3, 1], [3, 4], [4, 5], [1, 5], [1, 6], [7, 2], [5, 7], [7, 6], [3, 7], [4, 2], [6, 4], [7, 1], [4, 7]]], ['edgelist', 'G1141', 7, [[4, 2], [5, 3], [5, 6], [5, 1], [2, 5], [1, 4], [6, 1], [6, 3], [7, 2], [4, 7], [7, 1], [6, 7], [7, 3], [5, 7]]], ['edgelist', 'G1142', 7, [[1, 5], [4, 1], [3, 4], [5, 3], [2, 5], [4, 2], [2, 1], [3, 2], [6, 5], [2, 6], [7, 2], [4, 7], [1, 6], [7, 1]]], ['edgelist', 'G1143', 7, [[4, 5], [5, 3], [2, 6], [5, 1], [2, 5], 
[6, 4], [4, 1], [6, 3], [7, 5], [1, 7], [4, 7], [3, 7], [6, 7], [2, 7]]], ['edgelist', 'G1144', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 5], [2, 6], [3, 4], [3, 7], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1145', 7, [[3, 4], [5, 3], [7, 4], [5, 1], [5, 6], [4, 5], [4, 2], [6, 3], [2, 7], [6, 7], [7, 1], [6, 4], [7, 5], [1, 2]]], ['edgelist', 'G1146', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 5], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1147', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1148', 7, [[3, 4], [5, 3], [7, 4], [5, 1], [2, 5], [7, 1], [4, 2], [6, 3], [5, 6], [6, 7], [2, 6], [6, 4], [7, 5], [4, 1]]], ['edgelist', 'G1149', 7, [[4, 2], [5, 3], [1, 4], [5, 1], [2, 5], [6, 4], [6, 1], [6, 3], [7, 5], [2, 7], [7, 4], [1, 7], [7, 6], [3, 7]]], ['edgelist', 'G1150', 7, [[1, 2], [5, 3], [4, 1], [5, 1], [5, 6], [6, 4], [2, 4], [6, 3], [7, 5], [3, 7], [7, 6], [4, 7], [7, 2], [1, 7]]], ['edgelist', 'G1151', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 7], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1152', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 6], [2, 7], [3, 4], [3, 5], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1153', 7, [[3, 4], [5, 3], [7, 4], [5, 1], [2, 5], [7, 2], [4, 2], [6, 3], [6, 1], [6, 7], [5, 6], [6, 4], [7, 1], [4, 1]]], ['edgelist', 'G1154', 7, [[3, 4], [5, 3], [4, 1], [5, 1], [5, 6], [4, 5], [4, 2], [6, 3], [1, 2], [6, 7], [7, 1], [6, 4], [7, 5], [2, 7]]], ['edgelist', 'G1155', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1156', 7, [[1, 4], [1, 5], [1, 7], [2, 3], [2, 5], [2, 6], [3, 4], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1157', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1158', 7, [[1, 2], [1, 6], [1, 7], [2, 5], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1159', 7, [[1, 2], [1, 5], [1, 7], [2, 4], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1160', 7, [[3, 4], [5, 3], [7, 4], [5, 1], [2, 5], [5, 6], [4, 2], [6, 3], [6, 1], [7, 2], [1, 7], [6, 4], [7, 5], [4, 1]]], ['edgelist', 'G1161', 7, [[1, 2], [1, 6], [1, 7], [2, 4], [2, 5], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1162', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [3, 4], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1163', 7, [[1, 5], [1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [6, 7]]], ['edgelist', 'G1164', 7, [[3, 4], [5, 3], [7, 4], [5, 1], [5, 6], [4, 6], [4, 2], [6, 3], [4, 1], [2, 5], [7, 1], [2, 7], [7, 5], [1, 2]]], ['edgelist', 'G1165', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 7]]], ['edgelist', 'G1166', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1167', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 6], [2, 7], [3, 4], [3, 5], [3, 7], [4, 5], [4, 6], [5, 7], [6, 7]]], ['edgelist', 'G1168', 7, [[1, 4], [1, 5], [1, 6], [2, 3], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 
7], [5, 7], [6, 7]]], ['edgelist', 'G1169', 7, [[1, 4], [1, 5], [1, 6], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 7], [5, 7], [6, 7]]], ['edgelist', 'G1170', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7], [1, 7], [1, 3], [6, 1], [4, 6], [2, 4], [7, 2], [5, 7], [3, 5]]], ['edgelist', 'G1171', 7, [[1, 4], [1, 5], [1, 6], [1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 7], [5, 6]]], ['edgelist', 'G1172', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [2, 3], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [5, 6]]], ['edgelist', 'G1173', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [2, 4], [3, 1], [5, 1], [6, 4], [2, 7]]], ['edgelist', 'G1174', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [3, 5], [6, 3], [2, 6], [2, 5], [2, 4], [3, 1], [5, 1], [6, 4], [1, 7]]], ['edgelist', 'G1175', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [4, 5], [4, 6], [4, 7], [6, 7]]], ['edgelist', 'G1176', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 6], [2, 7], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1177', 7, [[4, 5], [5, 6], [1, 4], [1, 5], [1, 6], [1, 7], [4, 7], [5, 7], [6, 7], [2, 6], [4, 6], [3, 4], [3, 5], [2, 7], [1, 2]]], ['edgelist', 'G1178', 7, [[4, 5], [5, 6], [1, 4], [1, 5], [1, 6], [1, 7], [4, 7], [2, 4], [2, 5], [2, 6], [4, 6], [3, 4], [3, 5], [7, 2], [5, 7]]], ['edgelist', 'G1179', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 2], [5, 3], [5, 4], [6, 2], [1, 6], [6, 3], [4, 6], [5, 6], [7, 2], [6, 7]]], ['edgelist', 'G1180', 7, [[5, 4], [5, 6], [6, 4], [1, 2], [1, 6], [1, 4], [3, 5], [2, 6], [2, 4], [7, 6], [4, 7], [7, 1], [2, 7], [7, 3], [5, 7]]], ['edgelist', 'G1181', 7, [[4, 5], [5, 6], [6, 7], [1, 5], [1, 6], [1, 7], [4, 7], [2, 4], [5, 7], [2, 6], [4, 6], [3, 4], [3, 5], [2, 7], [1, 2]]], ['edgelist', 'G1182', 7, [[1, 3], [1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1183', 7, [[7, 2], [5, 6], [1, 4], [1, 5], [1, 6], [1, 7], [4, 7], [2, 4], [2, 5], [2, 6], [4, 6], [3, 4], [3, 5], [6, 7], [5, 7]]], ['edgelist', 'G1184', 7, [[4, 5], [5, 6], [1, 4], [1, 5], [1, 6], [5, 7], [4, 7], [2, 4], [2, 5], [2, 6], [4, 6], [3, 4], [3, 5], [6, 7], [6, 3]]], ['edgelist', 'G1185', 7, [[4, 5], [5, 6], [1, 4], [1, 5], [7, 1], [5, 7], [4, 7], [2, 4], [2, 5], [2, 6], [4, 6], [3, 4], [3, 5], [6, 7], [6, 3]]], ['edgelist', 'G1186', 7, [[1, 2], [2, 3], [1, 3], [4, 1], [2, 4], [3, 4], [6, 2], [4, 6], [5, 4], [3, 5], [7, 3], [4, 7], [7, 2], [1, 6], [5, 1]]], ['edgelist', 'G1187', 7, [[1, 2], [3, 1], [4, 3], [5, 4], [2, 5], [4, 2], [5, 3], [1, 5], [4, 1], [7, 4], [5, 7], [6, 5], [4, 6], [7, 3], [6, 2]]], ['edgelist', 'G1188', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [2, 3], [2, 4], [2, 5], [3, 4], [3, 5], [4, 5], [6, 4], [5, 6], [7, 5], [6, 7], [7, 4]]], ['edgelist', 'G1189', 7, [[1, 2], [2, 3], [3, 4], [1, 4], [5, 1], [5, 4], [5, 3], [7, 2], [3, 7], [6, 3], [5, 6], [7, 5], [1, 7], [7, 6], [4, 7]]], ['edgelist', 'G1190', 7, [[1, 2], [6, 4], [2, 4], [1, 5], [4, 1], [5, 4], [3, 5], [6, 3], [5, 6], [7, 5], [3, 7], [7, 6], [4, 7], [7, 2], [1, 7]]], ['edgelist', 'G1191', 7, [[6, 3], [5, 6], [4, 2], [1, 5], [1, 6], [1, 4], [3, 5], [2, 6], [2, 5], [7, 4], [2, 7], [7, 1], [6, 7], [7, 3], [5, 7]]], ['edgelist', 'G1192', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [1, 6], [1, 4], [3, 1], [4, 2], [7, 5], [6, 7], [7, 4], [1, 7], [7, 3], [2, 7]]], 
['edgelist', 'G1193', 7, [[6, 3], [4, 1], [6, 4], [1, 5], [1, 6], [5, 4], [3, 5], [2, 6], [2, 5], [7, 5], [3, 7], [7, 1], [6, 7], [7, 4], [2, 7]]], ['edgelist', 'G1194', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 7], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1195', 7, [[7, 2], [5, 6], [1, 4], [1, 5], [1, 6], [5, 7], [4, 7], [2, 4], [2, 5], [1, 7], [4, 6], [3, 4], [3, 5], [6, 7], [6, 3]]], ['edgelist', 'G1196', 7, [[4, 5], [1, 2], [1, 4], [1, 5], [1, 6], [5, 7], [4, 7], [2, 4], [2, 5], [7, 1], [2, 7], [3, 4], [3, 5], [6, 7], [6, 3]]], ['edgelist', 'G1197', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 7]]], ['edgelist', 'G1198', 7, [[6, 3], [5, 6], [6, 4], [1, 5], [1, 2], [2, 4], [3, 5], [4, 1], [2, 5], [7, 5], [2, 7], [7, 1], [4, 7], [7, 3], [6, 7]]], ['edgelist', 'G1199', 7, [[6, 1], [5, 4], [6, 4], [6, 3], [1, 2], [2, 4], [3, 5], [4, 1], [2, 5], [7, 3], [6, 7], [7, 5], [4, 7], [7, 1], [2, 7]]], ['edgelist', 'G1200', 7, [[4, 5], [5, 6], [1, 4], [5, 7], [1, 2], [2, 7], [4, 7], [2, 4], [2, 5], [7, 1], [1, 6], [3, 4], [3, 5], [6, 7], [6, 3]]], ['edgelist', 'G1201', 7, [[1, 3], [1, 4], [1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1202', 7, [[1, 5], [1, 6], [1, 7], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1203', 7, [[4, 5], [6, 1], [1, 4], [1, 5], [5, 7], [2, 7], [4, 7], [2, 4], [2, 5], [7, 1], [2, 6], [3, 4], [3, 5], [6, 7], [6, 3]]], ['edgelist', 'G1204', 7, [[7, 5], [6, 3], [1, 4], [1, 5], [3, 5], [2, 7], [4, 7], [2, 4], [2, 5], [7, 1], [4, 6], [3, 4], [1, 2], [6, 7], [5, 6]]], ['edgelist', 'G1205', 7, [[1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 6], [2, 7], [3, 4], [3, 5], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1206', 7, [[1, 2], [1, 3], [1, 4], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1207', 7, [[3, 4], [1, 3], [4, 1], [5, 4], [2, 5], [6, 2], [5, 6], [2, 1], [6, 3], [7, 3], [6, 7], [7, 2], [1, 7], [7, 5], [4, 7]]], ['edgelist', 'G1208', 7, [[4, 1], [4, 6], [4, 5], [3, 1], [3, 6], [3, 5], [2, 5], [2, 6], [2, 1], [7, 1], [2, 7], [7, 6], [4, 7], [7, 5], [3, 7]]], ['edgelist', 'G1209', 7, [[1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1210', 7, [[4, 5], [7, 3], [1, 4], [1, 5], [6, 1], [2, 7], [4, 7], [2, 4], [2, 5], [7, 1], [5, 6], [3, 4], [3, 5], [6, 2], [6, 3]]], ['edgelist', 'G1211', 7, [[4, 5], [7, 3], [1, 4], [1, 5], [6, 1], [6, 7], [4, 7], [2, 4], [2, 5], [1, 2], [5, 7], [3, 4], [3, 5], [6, 2], [6, 3]]], ['edgelist', 'G1212', 7, [[1, 2], [2, 3], [3, 4], [4, 5], [1, 5], [7, 3], [2, 7], [7, 1], [5, 7], [6, 5], [1, 6], [4, 6], [7, 4], [6, 3], [2, 6]]], ['edgelist', 'G1213', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [2, 3], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [5, 6], [3, 7]]], ['edgelist', 'G1214', 7, [[4, 1], [5, 2], [5, 4], [2, 4], [5, 1], [3, 6], [7, 3], [6, 7], [2, 6], [5, 6], [4, 6], [1, 6], [1, 7], [4, 7], [5, 7], [7, 2]]], ['edgelist', 'G1215', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 7], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1216', 7, [[1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 
6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1217', 7, [[4, 5], [6, 2], [1, 4], [1, 5], [6, 1], [5, 7], [4, 7], [2, 4], [2, 5], [7, 1], [4, 6], [3, 4], [3, 5], [6, 7], [5, 6], [3, 6]]], ['edgelist', 'G1218', 7, [[3, 5], [4, 2], [4, 1], [5, 4], [5, 1], [6, 3], [5, 6], [6, 1], [4, 6], [6, 2], [7, 6], [2, 7], [4, 7], [7, 1], [5, 7], [7, 3]]], ['edgelist', 'G1219', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1220', 7, [[3, 5], [5, 2], [4, 1], [4, 2], [5, 1], [6, 3], [5, 6], [6, 1], [4, 6], [7, 6], [2, 6], [7, 2], [4, 7], [5, 7], [7, 3], [7, 1]]], ['edgelist', 'G1221', 7, [[1, 2], [1, 4], [1, 6], [1, 7], [2, 4], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1222', 7, [[3, 6], [1, 2], [5, 6], [2, 4], [6, 1], [5, 4], [6, 4], [3, 5], [2, 5], [4, 1], [7, 4], [3, 7], [7, 5], [6, 7], [7, 2], [1, 7]]], ['edgelist', 'G1223', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 4], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1224', 7, [[3, 6], [6, 2], [4, 2], [1, 5], [6, 1], [5, 4], [6, 4], [3, 5], [2, 5], [4, 1], [7, 3], [5, 7], [6, 7], [7, 2], [1, 7], [4, 7]]], ['edgelist', 'G1225', 7, [[2, 7], [1, 2], [1, 4], [1, 5], [6, 1], [5, 7], [4, 7], [2, 4], [2, 5], [7, 1], [4, 6], [3, 4], [3, 5], [6, 7], [5, 6], [3, 6]]], ['edgelist', 'G1226', 7, [[4, 5], [6, 2], [1, 4], [1, 5], [6, 1], [5, 7], [4, 7], [2, 4], [2, 5], [7, 1], [2, 7], [3, 4], [3, 5], [6, 7], [1, 2], [3, 6]]], ['edgelist', 'G1227', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6]]], ['edgelist', 'G1228', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 4], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [6, 7]]], ['edgelist', 'G1229', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 5], [2, 6], [2, 7], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [6, 7]]], ['edgelist', 'G1230', 7, [[3, 6], [6, 2], [4, 6], [1, 5], [1, 2], [5, 4], [4, 3], [3, 5], [2, 5], [1, 6], [7, 5], [3, 7], [7, 4], [6, 7], [7, 2], [1, 7]]], ['edgelist', 'G1231', 7, [[6, 7], [6, 2], [1, 4], [1, 5], [1, 2], [5, 7], [4, 7], [2, 4], [2, 5], [7, 1], [4, 6], [3, 4], [3, 5], [7, 3], [5, 6], [3, 6]]], ['edgelist', 'G1232', 7, [[4, 5], [6, 2], [1, 4], [1, 5], [1, 2], [5, 7], [4, 7], [2, 4], [2, 5], [7, 1], [6, 1], [3, 4], [3, 5], [7, 3], [7, 6], [3, 6]]], ['edgelist', 'G1233', 7, [[6, 1], [6, 2], [1, 4], [1, 5], [7, 2], [5, 7], [4, 7], [2, 4], [2, 5], [7, 1], [4, 6], [3, 4], [3, 5], [7, 3], [5, 6], [3, 6]]], ['edgelist', 'G1234', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [2, 3], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [5, 6], [7, 3], [2, 7]]], ['edgelist', 'G1235', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [5, 6], [5, 7]]], ['edgelist', 'G1236', 7, [[5, 1], [5, 4], [1, 2], [4, 1], [3, 5], [4, 2], [6, 4], [5, 6], [6, 3], [7, 6], [6, 1], [2, 6], [7, 2], [1, 7], [7, 5], [3, 7], [4, 7]]], ['edgelist', 'G1237', 7, [[1, 2], [6, 2], [6, 4], [1, 5], [6, 1], [5, 4], [4, 2], [3, 6], [2, 5], [4, 1], [3, 5], [7, 3], [6, 7], [7, 4], [2, 7], [7, 1], [5, 7]]], ['edgelist', 'G1238', 7, [[4, 5], [6, 2], [1, 4], [1, 5], [5, 6], [5, 7], [4, 7], [2, 4], [2, 5], [1, 2], [4, 6], [3, 4], [3, 5], [3, 6], [6, 1], [6, 7], [7, 3]]], ['edgelist', 'G1239', 7, [[4, 3], [5, 2], [1, 2], 
[4, 1], [3, 5], [5, 4], [6, 2], [5, 6], [6, 3], [1, 6], [6, 4], [7, 6], [3, 7], [7, 4], [1, 7], [2, 7], [5, 7]]], ['edgelist', 'G1240', 7, [[4, 3], [5, 2], [5, 1], [4, 1], [3, 5], [4, 2], [6, 3], [5, 6], [6, 1], [4, 6], [6, 2], [7, 6], [3, 7], [7, 1], [4, 7], [7, 5], [2, 7]]], ['edgelist', 'G1241', 7, [[4, 3], [6, 2], [6, 1], [1, 5], [1, 2], [5, 4], [6, 4], [3, 6], [2, 5], [4, 1], [3, 5], [7, 5], [6, 7], [7, 3], [4, 7], [7, 1], [2, 7]]], ['edgelist', 'G1242', 7, [[4, 3], [6, 2], [6, 1], [1, 5], [5, 6], [1, 2], [4, 2], [3, 6], [2, 5], [4, 1], [3, 5], [7, 1], [4, 7], [7, 2], [6, 7], [7, 3], [5, 7]]], ['edgelist', 'G1243', 7, [[1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 6], [4, 7], [5, 6], [5, 7]]], ['edgelist', 'G1244', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [2, 3], [2, 4], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [4, 5], [4, 6], [5, 6], [7, 2], [1, 7], [6, 7]]], ['edgelist', 'G1245', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7]]], ['edgelist', 'G1246', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 5], [2, 6], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1247', 7, [[1, 2], [1, 3], [1, 4], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1248', 7, [[5, 1], [5, 6], [4, 1], [4, 6], [3, 1], [3, 6], [2, 4], [2, 5], [2, 6], [2, 1], [3, 4], [3, 5], [7, 1], [6, 7], [7, 2], [3, 7], [7, 5], [4, 7]]], ['edgelist', 'G1249', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1250', 7, [[1, 2], [1, 3], [1, 4], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1251', 7, [[1, 2], [1, 3], [1, 4], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]], ['edgelist', 'G1252', 7, [[1, 2], [1, 3], [1, 4], [1, 5], [1, 6], [1, 7], [2, 3], [2, 4], [2, 5], [2, 6], [2, 7], [3, 4], [3, 5], [3, 6], [3, 7], [4, 5], [4, 6], [4, 7], [5, 6], [5, 7], [6, 7]]]] GAG=[] for i in range(1253): g=make_small_graph(descr_list[i]) GAG.append(g) return GAG networkx-1.8.1/networkx/generators/__init__.py0000664000175000017500000000136412177456333021444 0ustar aricaric00000000000000""" A package for generating various graphs in networkx. 
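The generator functions imported below are also available at the top level of the networkx package, so they can be called directly as nx.<name>; for example:

>>> import networkx as nx
>>> nx.petersen_graph().number_of_nodes()
10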
""" from networkx.generators.atlas import * from networkx.generators.bipartite import * from networkx.generators.classic import * from networkx.generators.degree_seq import * from networkx.generators.directed import * from networkx.generators.ego import * from networkx.generators.geometric import * from networkx.generators.hybrid import * from networkx.generators.line import * from networkx.generators.random_graphs import * from networkx.generators.small import * from networkx.generators.stochastic import * from networkx.generators.social import * from networkx.generators.threshold import * from networkx.generators.intersection import * from networkx.generators.random_clustered import * networkx-1.8.1/networkx/generators/random_clustered.py0000664000175000017500000000777512177456333023253 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """Generate graphs with given degree and triangle sequence. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import random import networkx as nx __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Joel Miller (joel.c.miller.research@gmail.com)']) __all__ = ['random_clustered_graph'] def random_clustered_graph(joint_degree_sequence, create_using=None, seed=None): """Generate a random graph with the given joint degree and triangle degree sequence. This uses a configuration model-like approach to generate a random pseudograph (graph with parallel edges and self loops) by randomly assigning edges to match the given indepdenent edge and triangle degree sequence. Parameters ---------- joint_degree_sequence : list of integer pairs Each list entry corresponds to the independent edge degree and triangle degree of a node. create_using : graph, optional (default MultiGraph) Return graph of this type. The instance will be cleared. seed : hashable object, optional The seed for the random number generator. Returns ------- G : MultiGraph A graph with the specified degree sequence. Nodes are labeled starting at 0 with an index corresponding to the position in deg_sequence. Raises ------ NetworkXError If the independent edge degree sequence sum is not even or the triangle degree sequence sum is not divisible by 3. Notes ----- As described by Miller [1]_ (see also Newman [2]_ for an equivalent description). A non-graphical degree sequence (not realizable by some simple graph) is allowed since this function returns graphs with self loops and parallel edges. An exception is raised if the independent degree sequence does not have an even sum or the triangle degree sequence sum is not divisible by 3. This configuration model-like construction process can lead to duplicate edges and loops. You can remove the self-loops and parallel edges (see below) which will likely result in a graph that doesn't have the exact degree sequence specified. This "finite-size effect" decreases as the size of the graph increases. References ---------- .. [1] J. C. Miller "Percolation and Epidemics on Random Clustered Graphs." Physical Review E, Rapid Communication (to appear). .. [2] M.E.J. Newman, "Random clustered networks". Physical Review Letters (to appear). 
Examples -------- >>> deg_tri=[[1,0],[1,0],[1,0],[2,0],[1,0],[2,1],[0,1],[0,1]] >>> G = nx.random_clustered_graph(deg_tri) To remove parallel edges: >>> G=nx.Graph(G) To remove self loops: >>> G.remove_edges_from(G.selfloop_edges()) """ if create_using is None: create_using = nx.MultiGraph() elif create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") if seed is not None: random.seed(seed) # In Python 3, zip() returns an iterator. Make this into a list. joint_degree_sequence = list(joint_degree_sequence) N = len(joint_degree_sequence) G = nx.empty_graph(N,create_using) ilist = [] tlist = [] for n in G: degrees = joint_degree_sequence[n] for icount in range(degrees[0]): ilist.append(n) for tcount in range(degrees[1]): tlist.append(n) if len(ilist)%2 != 0 or len(tlist)%3 != 0: raise nx.NetworkXError('Invalid degree sequence') random.shuffle(ilist) random.shuffle(tlist) while ilist: G.add_edge(ilist.pop(),ilist.pop()) while tlist: n1 = tlist.pop() n2 = tlist.pop() n3 = tlist.pop() G.add_edges_from([(n1,n2),(n1,n3),(n2,n3)]) G.name = "random_clustered %d nodes %d edges"%(G.order(),G.size()) return G networkx-1.8.1/networkx/generators/geometric.py0000664000175000017500000002635012177456333021665 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Generators for geometric graphs. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from __future__ import print_function __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Dan Schult (dschult@colgate.edu)', 'Ben Edwards (BJEdwards@gmail.com)']) __all__ = ['random_geometric_graph', 'waxman_graph', 'geographical_threshold_graph', 'navigable_small_world_graph'] from bisect import bisect_left from functools import reduce from itertools import product import math, random, sys import networkx as nx #--------------------------------------------------------------------------- # Random Geometric Graphs #--------------------------------------------------------------------------- def random_geometric_graph(n, radius, dim=2, pos=None): r"""Return the random geometric graph in the unit cube. The random geometric graph model places n nodes uniformly at random in the unit cube. Two nodes `u,v` are connected with an edge if `d(u,v)<=r` where `d` is the Euclidean distance and `r` is a radius threshold. Parameters ---------- n : int Number of nodes radius: float Distance threshold value dim : int, optional Dimension of graph pos : dict, optional A dictionary keyed by node with node positions as values. Returns ------- Graph Examples -------- >>> G = nx.random_geometric_graph(20,0.1) Notes ----- This uses an `n^2` algorithm to build the graph. A faster algorithm is possible using k-d trees (a sketch is given after the references below). The pos keyword can be used to specify node positions so you can create an arbitrary distribution and domain for positions. If you need a distance function other than Euclidean you'll have to hack the algorithm. E.g. to use a 2d Gaussian distribution of node positions with mean (0,0) and std. dev. 2 >>> import random >>> n=20 >>> p=dict((i,(random.gauss(0,2),random.gauss(0,2))) for i in range(n)) >>> G = nx.random_geometric_graph(n,0.2,pos=p) References ---------- .. [1] Penrose, Mathew, Random Geometric Graphs, Oxford Studies in Probability, 5, 2003.
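As noted above, a k-d tree gives a faster edge construction. A minimal sketch, assuming scipy is available (scipy is not a dependency of this module), that G already carries 'pos' node attributes, and that radius is the same threshold as above::

    import scipy.spatial
    pos = nx.get_node_attributes(G, 'pos')
    nodes = list(pos)
    # KDTree.query_pairs returns index pairs (i, j), i < j, within the radius
    tree = scipy.spatial.KDTree([pos[v] for v in nodes])
    for i, j in tree.query_pairs(radius):
        G.add_edge(nodes[i], nodes[j])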
""" G=nx.Graph() G.name="Random Geometric Graph" G.add_nodes_from(range(n)) if pos is None: # random positions for n in G: G.node[n]['pos']=[random.random() for i in range(0,dim)] else: nx.set_node_attributes(G,'pos',pos) # connect nodes within "radius" of each other # n^2 algorithm, could use a k-d tree implementation nodes = G.nodes(data=True) while nodes: u,du = nodes.pop() pu = du['pos'] for v,dv in nodes: pv = dv['pos'] d = sum(((a-b)**2 for a,b in zip(pu,pv))) if d <= radius**2: G.add_edge(u,v) return G def geographical_threshold_graph(n, theta, alpha=2, dim=2, pos=None, weight=None): r"""Return a geographical threshold graph. The geographical threshold graph model places n nodes uniformly at random in a rectangular domain. Each node `u` is assigned a weight `w_u`. Two nodes `u,v` are connected with an edge if .. math:: w_u + w_v \ge \theta r^{\alpha} where `r` is the Euclidean distance between `u` and `v`, and `\theta`, `\alpha` are parameters. Parameters ---------- n : int Number of nodes theta: float Threshold value alpha: float, optional Exponent of distance function dim : int, optional Dimension of graph pos : dict Node positions as a dictionary of tuples keyed by node. weight : dict Node weights as a dictionary of numbers keyed by node. Returns ------- Graph Examples -------- >>> G = nx.geographical_threshold_graph(20,50) Notes ----- If weights are not specified they are assigned to nodes by drawing randomly from an the exponential distribution with rate parameter `\lambda=1`. To specify a weights from a different distribution assign them to a dictionary and pass it as the weight= keyword >>> import random >>> n = 20 >>> w=dict((i,random.expovariate(5.0)) for i in range(n)) >>> G = nx.geographical_threshold_graph(20,50,weight=w) If node positions are not specified they are randomly assigned from the uniform distribution. References ---------- .. [1] Masuda, N., Miwa, H., Konno, N.: Geographical threshold graphs with small-world and scale-free properties. Physical Review E 71, 036108 (2005) .. [2] Milan Bradonjić, Aric Hagberg and Allon G. Percus, Giant component and connectivity in geographical threshold graphs, in Algorithms and Models for the Web-Graph (WAW 2007), Antony Bonato and Fan Chung (Eds), pp. 209--216, 2007 """ G=nx.Graph() # add n nodes G.add_nodes_from([v for v in range(n)]) if weight is None: # choose weights from exponential distribution for n in G: G.node[n]['weight'] = random.expovariate(1.0) else: nx.set_node_attributes(G,'weight',weight) if pos is None: # random positions for n in G: G.node[n]['pos']=[random.random() for i in range(0,dim)] else: nx.set_node_attributes(G,'pos',pos) G.add_edges_from(geographical_threshold_edges(G, theta, alpha)) return G def geographical_threshold_edges(G, theta, alpha=2): # generate edges for a geographical threshold graph given a graph # with positions and weights assigned as node attributes 'pos' and 'weight'. nodes = G.nodes(data=True) while nodes: u,du = nodes.pop() wu = du['weight'] pu = du['pos'] for v,dv in nodes: wv = dv['weight'] pv = dv['pos'] r = math.sqrt(sum(((a-b)**2 for a,b in zip(pu,pv)))) if wu+wv >= theta*r**alpha: yield(u,v) def waxman_graph(n, alpha=0.4, beta=0.1, L=None, domain=(0,0,1,1)): r"""Return a Waxman random graph. The Waxman random graph models place n nodes uniformly at random in a rectangular domain. Two nodes u,v are connected with an edge with probability .. math:: p = \alpha*exp(-d/(\beta*L)). This function implements both Waxman models. 
Waxman-1: `L` not specified The distance `d` is the Euclidean distance between the nodes u and v. `L` is the maximum distance between all nodes in the graph. Waxman-2: `L` specified The distance `d` is chosen randomly in `[0,L]`. Parameters ---------- n : int Number of nodes alpha: float Model parameter beta: float Model parameter L : float, optional Maximum distance between nodes. If not specified the actual distance is calculated. domain : tuple of numbers, optional Domain size (xmin, ymin, xmax, ymax) Returns ------- G: Graph References ---------- .. [1] B. M. Waxman, Routing of multipoint connections. IEEE J. Select. Areas Commun. 6(9),(1988) 1617-1622. """ # build graph of n nodes with random positions in the given domain G = nx.Graph() G.add_nodes_from(range(n)) (xmin,ymin,xmax,ymax)=domain for n in G: G.node[n]['pos']=(xmin + (xmax-xmin)*random.random(), ymin + (ymax-ymin)*random.random()) if L is None: # find maximum distance L between two nodes l = 0 pos = list(nx.get_node_attributes(G,'pos').values()) while pos: x1,y1 = pos.pop() for x2,y2 in pos: r2 = (x1-x2)**2 + (y1-y2)**2 if r2 > l: l = r2 l=math.sqrt(l) else: # user specified maximum distance l = L nodes=G.nodes() if L is None: # Waxman-1 model # try all pairs, connect randomly based on euclidean distance while nodes: u = nodes.pop() x1,y1 = G.node[u]['pos'] for v in nodes: x2,y2 = G.node[v]['pos'] r = math.sqrt((x1-x2)**2 + (y1-y2)**2) if random.random() < alpha*math.exp(-r/(beta*l)): G.add_edge(u,v) else: # Waxman-2 model # try all pairs, connect randomly based on randomly chosen l while nodes: u = nodes.pop() for v in nodes: r = random.random()*l if random.random() < alpha*math.exp(-r/(beta*l)): G.add_edge(u,v) return G def navigable_small_world_graph(n, p=1, q=1, r=2, dim=2, seed=None): r"""Return a navigable small-world graph. A navigable small-world graph is a directed grid with additional long-range connections that are chosen randomly. From [1]_: Begin with a set of nodes that are identified with the set of lattice points in an `n \times n` square, `{(i,j): i\in {1,2,\ldots,n}, j\in {1,2,\ldots,n}}` and define the lattice distance between two nodes `(i,j)` and `(k,l)` to be the number of "lattice steps" separating them: `d((i,j),(k,l)) = |k-i|+|l-j|`. For a universal constant `p`, the node `u` has a directed edge to every other node within lattice distance `p` (local contacts). For universal constants `q\ge 0` and `r\ge 0` construct directed edges from `u` to `q` other nodes (long-range contacts) using independent random trials; the i'th directed edge from `u` has endpoint `v` with probability proportional to `d(u,v)^{-r}`. Parameters ---------- n : int The number of nodes. p : int The diameter of short range connections. Each node is connected to every other node within lattice distance p. q : int The number of long-range connections for each node. r : float Exponent for decaying probability of connections. The probability of connecting to a node at lattice distance d is 1/d^r. dim : int Dimension of grid seed : int, optional Seed for random number generator (default=None). References ---------- .. [1] J. Kleinberg. The small-world phenomenon: An algorithmic perspective. Proc. 32nd ACM Symposium on Theory of Computing, 2000.
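Examples
--------
A small example; the long-range edges are random, but the node set is always the full grid of n**dim lattice points, so only the node count is checked here.

>>> G = nx.navigable_small_world_graph(3, p=1, q=1, seed=42)
>>> len(G)
9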
""" if (p < 1): raise nx.NetworkXException("p must be >= 1") if (q < 0): raise nx.NetworkXException("q must be >= 0") if (r < 0): raise nx.NetworkXException("r must be >= 1") if not seed is None: random.seed(seed) G = nx.DiGraph() nodes = list(product(range(n),repeat=dim)) for p1 in nodes: probs = [0] for p2 in nodes: if p1==p2: continue d = sum((abs(b-a) for a,b in zip(p1,p2))) if d <= p: G.add_edge(p1,p2) probs.append(d**-r) cdf = list(nx.utils.cumulative_sum(probs)) for _ in range(q): target = nodes[bisect_left(cdf,random.uniform(0, cdf[-1]))] G.add_edge(p1,target) return G networkx-1.8.1/networkx/generators/hybrid.py0000664000175000017500000000723212177456333021166 0ustar aricaric00000000000000""" Hybrid """ __author__ = """Aric Hagberg (hagberg@lanl.gov)\nDan Schult (dschult@colgate.edu)""" # Copyright (C) 2004-2008 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. _all__ = ['kl_connected_subgraph', 'is_kl_connected'] import copy import networkx as nx def kl_connected_subgraph(G,k,l,low_memory=False,same_as_graph=False): """ Returns the maximum locally (k,l) connected subgraph of G. (k,l)-connected subgraphs are presented by Fan Chung and Li in "The Small World Phenomenon in hybrid power law graphs" to appear in "Complex Networks" (Ed. E. Ben-Naim) Lecture Notes in Physics, Springer (2004) low_memory=True then use a slightly slower, but lower memory version same_as_graph=True then return a tuple with subgraph and pflag for if G is kl-connected """ H=copy.deepcopy(G) # subgraph we construct by removing from G graphOK=True deleted_some=True # hack to start off the while loop while deleted_some: deleted_some=False for edge in H.edges(): (u,v)=edge ### Get copy of graph needed for this search if low_memory: verts=set([u,v]) for i in range(k): [verts.update(G.neighbors(w)) for w in verts.copy()] G2=G.subgraph(list(verts)) else: G2=copy.deepcopy(G) ### path=[u,v] cnt=0 accept=0 while path: cnt += 1 # Found a path if cnt>=l: accept=1 break # record edges along this graph prev=u for w in path: if prev!=w: G2.remove_edge(prev,w) prev=w # path=shortest_path(G2,u,v,k) # ??? should "Cutoff" be k+1? try: path=nx.shortest_path(G2,u,v) # ??? should "Cutoff" be k+1? except nx.NetworkXNoPath: path = False # No Other Paths if accept==0: H.remove_edge(u,v) deleted_some=True if graphOK: graphOK=False # We looked through all edges and removed none of them. # So, H is the maximal (k,l)-connected subgraph of G if same_as_graph: return (H,graphOK) return H def is_kl_connected(G,k,l,low_memory=False): """Returns True if G is kl connected.""" graphOK=True for edge in G.edges(): (u,v)=edge ### Get copy of graph needed for this search if low_memory: verts=set([u,v]) for i in range(k): [verts.update(G.neighbors(w)) for w in verts.copy()] G2=G.subgraph(verts) else: G2=copy.deepcopy(G) ### path=[u,v] cnt=0 accept=0 while path: cnt += 1 # Found a path if cnt>=l: accept=1 break # record edges along this graph prev=u for w in path: if w!=prev: G2.remove_edge(prev,w) prev=w # path=shortest_path(G2,u,v,k) # ??? should "Cutoff" be k+1? try: path=nx.shortest_path(G2,u,v) # ??? should "Cutoff" be k+1? except nx.NetworkXNoPath: path = False # No Other Paths if accept==0: graphOK=False break # return status return graphOK networkx-1.8.1/networkx/generators/line.py0000664000175000017500000000406712177456333020637 0ustar aricaric00000000000000""" Line graphs. """ # Copyright (C) 2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
__author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult(dschult@colgate.edu)""" __all__ = ['line_graph'] import networkx as nx def line_graph(G): """Return the line graph of the graph or digraph G. The line graph of a graph G has a node for each edge in G and an edge between those nodes if the two edges in G share a common node. For DiGraphs an edge an edge represents a directed path of length 2. The original node labels are kept as two-tuple node labels in the line graph. Parameters ---------- G : graph A NetworkX Graph or DiGraph Examples -------- >>> G=nx.star_graph(3) >>> L=nx.line_graph(G) >>> print(sorted(L.edges())) # makes a clique, K3 [((0, 1), (0, 2)), ((0, 1), (0, 3)), ((0, 3), (0, 2))] Notes ----- Not implemented for MultiGraph or MultiDiGraph classes. Graph, node, and edge data are not propagated to the new graph. """ if type(G) == nx.MultiGraph or type(G) == nx.MultiDiGraph: raise Exception("Line graph not implemented for Multi(Di)Graphs") L=G.__class__() if G.is_directed(): for u,nlist in G.adjacency_iter(): # same as successors for digraph # look for directed path of length two for n in nlist: nbrs=G[n] # successors for nbr in nbrs: if nbr!=u: L.add_edge((u,n),(n,nbr)) else: for u,nlist in G.adjacency_iter(): # label nodes as tuple of edge endpoints in original graph # "node tuple" must be in lexigraphical order nodes=[tuple(sorted(n)) for n in zip([u]*len(nlist),nlist)] # add clique of nodes to graph while nodes: u=nodes.pop() L.add_edges_from((u,v) for v in nodes) return L networkx-1.8.1/networkx/generators/tests/0000775000175000017500000000000012177457361020473 5ustar aricaric00000000000000networkx-1.8.1/networkx/generators/tests/test_directed.py0000664000175000017500000000244112177456333023666 0ustar aricaric00000000000000#!/usr/bin/env python """Generators - Directed Graphs ---------------------------- """ from nose.tools import * from networkx import * from networkx.generators.directed import * class TestGeneratorsDirected(): def test_smoke_test_random_graphs(self): G=gn_graph(100) G=gnr_graph(100,0.5) G=gnc_graph(100) G=scale_free_graph(100) def test_create_using_keyword_arguments(self): assert_raises(networkx.exception.NetworkXError, gn_graph, 100, create_using=Graph()) assert_raises(networkx.exception.NetworkXError, gnr_graph, 100, 0.5, create_using=Graph()) assert_raises(networkx.exception.NetworkXError, gnc_graph, 100, create_using=Graph()) assert_raises(networkx.exception.NetworkXError, scale_free_graph, 100, create_using=Graph()) G=gn_graph(100,seed=1) MG=gn_graph(100,create_using=MultiDiGraph(),seed=1) assert_equal(G.edges(), MG.edges()) G=gnr_graph(100,0.5,seed=1) MG=gnr_graph(100,0.5,create_using=MultiDiGraph(),seed=1) assert_equal(G.edges(), MG.edges()) G=gnc_graph(100,seed=1) MG=gnc_graph(100,create_using=MultiDiGraph(),seed=1) assert_equal(G.edges(), MG.edges()) networkx-1.8.1/networkx/generators/tests/test_classic.py0000664000175000017500000003613412177456333023532 0ustar aricaric00000000000000#!/usr/bin/env python """ ==================== Generators - Classic ==================== Unit tests for various classic graph generators in generators/classic.py """ from nose.tools import * from networkx import * from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic is_isomorphic=graph_could_be_isomorphic class TestGeneratorClassic(): def test_balanced_tree(self): # balanced_tree(r,h) is a tree with (r**(h+1)-1)/(r-1) edges for r,h in [(2,2),(3,3),(6,2)]: t=balanced_tree(r,h) order=t.order() 
assert_true(order==(r**(h+1)-1)/(r-1)) assert_true(is_connected(t)) assert_true(t.size()==order-1) dh = degree_histogram(t) assert_equal(dh[0],0) # no nodes of 0 assert_equal(dh[1],r**h) # nodes of degree 1 are leaves assert_equal(dh[r],1) # root is degree r assert_equal(dh[r+1],order-r**h-1)# everyone else is degree r+1 assert_equal(len(dh),r+2) def test_balanced_tree_star(self): # balanced_tree(r,1) is the r-star t=balanced_tree(r=2,h=1) assert_true(is_isomorphic(t,star_graph(2))) t=balanced_tree(r=5,h=1) assert_true(is_isomorphic(t,star_graph(5))) t=balanced_tree(r=10,h=1) assert_true(is_isomorphic(t,star_graph(10))) def test_full_rary_tree(self): r=2 n=9 t=full_rary_tree(r,n) assert_equal(t.order(),n) assert_true(is_connected(t)) dh = degree_histogram(t) assert_equal(dh[0],0) # no nodes of 0 assert_equal(dh[1],5) # nodes of degree 1 are leaves assert_equal(dh[r],1) # root is degree r assert_equal(dh[r+1],9-5-1) # everyone else is degree r+1 assert_equal(len(dh),r+2) def test_full_rary_tree_balanced(self): t=full_rary_tree(2,15) th=balanced_tree(2,3) assert_true(is_isomorphic(t,th)) def test_full_rary_tree_path(self): t=full_rary_tree(1,10) assert_true(is_isomorphic(t,path_graph(10))) def test_full_rary_tree_empty(self): t=full_rary_tree(0,10) assert_true(is_isomorphic(t,empty_graph(10))) t=full_rary_tree(3,0) assert_true(is_isomorphic(t,empty_graph(0))) def test_full_rary_tree_3_20(self): t=full_rary_tree(3,20) assert_equal(t.order(),20) def test_barbell_graph(self): # number of nodes = 2*m1 + m2 (2 m1-complete graphs + m2-path + 2 edges) # number of edges = 2*(number_of_edges(m1-complete graph) + m2 + 1 m1=3; m2=5 b=barbell_graph(m1,m2) assert_true(number_of_nodes(b)==2*m1+m2) assert_true(number_of_edges(b)==m1*(m1-1) + m2 + 1) assert_equal(b.name, 'barbell_graph(3,5)') m1=4; m2=10 b=barbell_graph(m1,m2) assert_true(number_of_nodes(b)==2*m1+m2) assert_true(number_of_edges(b)==m1*(m1-1) + m2 + 1) assert_equal(b.name, 'barbell_graph(4,10)') m1=3; m2=20 b=barbell_graph(m1,m2) assert_true(number_of_nodes(b)==2*m1+m2) assert_true(number_of_edges(b)==m1*(m1-1) + m2 + 1) assert_equal(b.name, 'barbell_graph(3,20)') # Raise NetworkXError if m1<2 m1=1; m2=20 assert_raises(networkx.exception.NetworkXError, barbell_graph, m1, m2) # Raise NetworkXError if m2<0 m1=5; m2=-2 assert_raises(networkx.exception.NetworkXError, barbell_graph, m1, m2) # barbell_graph(2,m) = path_graph(m+4) m1=2; m2=5 b=barbell_graph(m1,m2) assert_true(is_isomorphic(b, path_graph(m2+4))) m1=2; m2=10 b=barbell_graph(m1,m2) assert_true(is_isomorphic(b, path_graph(m2+4))) m1=2; m2=20 b=barbell_graph(m1,m2) assert_true(is_isomorphic(b, path_graph(m2+4))) assert_raises(networkx.exception.NetworkXError, barbell_graph, m1, m2, create_using=DiGraph()) mb=barbell_graph(m1, m2, create_using=MultiGraph()) assert_true(mb.edges()==b.edges()) def test_complete_graph(self): # complete_graph(m) is a connected graph with # m nodes and m*(m+1)/2 edges for m in [0, 1, 3, 5]: g = complete_graph(m) assert_true(number_of_nodes(g) == m) assert_true(number_of_edges(g) == m * (m - 1) // 2) mg=complete_graph(m, create_using=MultiGraph()) assert_true(mg.edges()==g.edges()) def test_complete_digraph(self): # complete_graph(m) is a connected graph with # m nodes and m*(m+1)/2 edges for m in [0, 1, 3, 5]: g = complete_graph(m,create_using=nx.DiGraph()) assert_true(number_of_nodes(g) == m) assert_true(number_of_edges(g) == m * (m - 1)) def test_complete_bipartite_graph(self): G=complete_bipartite_graph(0,0) assert_true(is_isomorphic( G, null_graph() )) 
for i in [1, 5]: G=complete_bipartite_graph(i,0) assert_true(is_isomorphic( G, empty_graph(i) )) G=complete_bipartite_graph(0,i) assert_true(is_isomorphic( G, empty_graph(i) )) G=complete_bipartite_graph(2,2) assert_true(is_isomorphic( G, cycle_graph(4) )) G=complete_bipartite_graph(1,5) assert_true(is_isomorphic( G, star_graph(5) )) G=complete_bipartite_graph(5,1) assert_true(is_isomorphic( G, star_graph(5) )) # complete_bipartite_graph(m1,m2) is a connected graph with # m1+m2 nodes and m1*m2 edges for m1, m2 in [(5, 11), (7, 3)]: G=complete_bipartite_graph(m1,m2) assert_equal(number_of_nodes(G), m1 + m2) assert_equal(number_of_edges(G), m1 * m2) assert_raises(networkx.exception.NetworkXError, complete_bipartite_graph, 7, 3, create_using=DiGraph()) mG=complete_bipartite_graph(7, 3, create_using=MultiGraph()) assert_equal(mG.edges(), G.edges()) def test_circular_ladder_graph(self): G=circular_ladder_graph(5) assert_raises(networkx.exception.NetworkXError, circular_ladder_graph, 5, create_using=DiGraph()) mG=circular_ladder_graph(5, create_using=MultiGraph()) assert_equal(mG.edges(), G.edges()) def test_cycle_graph(self): G=cycle_graph(4) assert_equal(sorted(G.edges()), [(0, 1), (0, 3), (1, 2), (2, 3)]) mG=cycle_graph(4, create_using=MultiGraph()) assert_equal(sorted(mG.edges()), [(0, 1), (0, 3), (1, 2), (2, 3)]) G=cycle_graph(4, create_using=DiGraph()) assert_false(G.has_edge(2,1)) assert_true(G.has_edge(1,2)) def test_dorogovtsev_goltsev_mendes_graph(self): G=dorogovtsev_goltsev_mendes_graph(0) assert_equal(G.edges(), [(0, 1)]) assert_equal(G.nodes(), [0, 1]) G=dorogovtsev_goltsev_mendes_graph(1) assert_equal(G.edges(), [(0, 1), (0, 2), (1, 2)]) assert_equal(average_clustering(G), 1.0) assert_equal(list(triangles(G).values()), [1, 1, 1]) G=dorogovtsev_goltsev_mendes_graph(10) assert_equal(number_of_nodes(G), 29526) assert_equal(number_of_edges(G), 59049) assert_equal(G.degree(0), 1024) assert_equal(G.degree(1), 1024) assert_equal(G.degree(2), 1024) assert_raises(networkx.exception.NetworkXError, dorogovtsev_goltsev_mendes_graph, 7, create_using=DiGraph()) assert_raises(networkx.exception.NetworkXError, dorogovtsev_goltsev_mendes_graph, 7, create_using=MultiGraph()) def test_empty_graph(self): G=empty_graph() assert_equal(number_of_nodes(G), 0) G=empty_graph(42) assert_equal(number_of_nodes(G), 42) assert_equal(number_of_edges(G), 0) assert_equal(G.name, 'empty_graph(42)') # create empty digraph G=empty_graph(42,create_using=DiGraph(name="duh")) assert_equal(number_of_nodes(G), 42) assert_equal(number_of_edges(G), 0) assert_equal(G.name, 'empty_graph(42)') assert_true(isinstance(G,DiGraph)) # create empty multigraph G=empty_graph(42,create_using=MultiGraph(name="duh")) assert_equal(number_of_nodes(G), 42) assert_equal(number_of_edges(G), 0) assert_equal(G.name, 'empty_graph(42)') assert_true(isinstance(G,MultiGraph)) # create empty graph from another pete=petersen_graph() G=empty_graph(42,create_using=pete) assert_equal(number_of_nodes(G), 42) assert_equal(number_of_edges(G), 0) assert_equal(G.name, 'empty_graph(42)') assert_true(isinstance(G,Graph)) def test_grid_2d_graph(self): n=5;m=6 G=grid_2d_graph(n,m) assert_equal(number_of_nodes(G), n*m) assert_equal(degree_histogram(G), [0,0,4,2*(n+m)-8,(n-2)*(m-2)]) DG=grid_2d_graph(n,m, create_using=DiGraph()) assert_equal(DG.succ, G.adj) assert_equal(DG.pred, G.adj) MG=grid_2d_graph(n,m, create_using=MultiGraph()) assert_equal(MG.edges(), G.edges()) def test_grid_graph(self): """grid_graph([n,m]) is a connected simple graph with the following 
properties: number_of_nodes=n*m degree_histogram=[0,0,4,2*(n+m)-8,(n-2)*(m-2)] """ for n, m in [(3, 5), (5, 3), (4, 5), (5, 4)]: dim=[n,m] g=grid_graph(dim) assert_equal(number_of_nodes(g), n*m) assert_equal(degree_histogram(g), [0,0,4,2*(n+m)-8,(n-2)*(m-2)]) assert_equal(dim,[n,m]) for n, m in [(1, 5), (5, 1)]: dim=[n,m] g=grid_graph(dim) assert_equal(number_of_nodes(g), n*m) assert_true(is_isomorphic(g,path_graph(5))) assert_equal(dim,[n,m]) # mg=grid_graph([n,m], create_using=MultiGraph()) # assert_equal(mg.edges(), g.edges()) def test_hypercube_graph(self): for n, G in [(0, null_graph()), (1, path_graph(2)), (2, cycle_graph(4)), (3, cubical_graph())]: g=hypercube_graph(n) assert_true(is_isomorphic(g, G)) g=hypercube_graph(4) assert_equal(degree_histogram(g), [0, 0, 0, 0, 16]) g=hypercube_graph(5) assert_equal(degree_histogram(g), [0, 0, 0, 0, 0, 32]) g=hypercube_graph(6) assert_equal(degree_histogram(g), [0, 0, 0, 0, 0, 0, 64]) # mg=hypercube_graph(6, create_using=MultiGraph()) # assert_equal(mg.edges(), g.edges()) def test_ladder_graph(self): for i, G in [(0, empty_graph(0)), (1, path_graph(2)), (2, hypercube_graph(2)), (10, grid_graph([2,10]))]: assert_true(is_isomorphic(ladder_graph(i), G)) assert_raises(networkx.exception.NetworkXError, ladder_graph, 2, create_using=DiGraph()) g = ladder_graph(2) mg=ladder_graph(2, create_using=MultiGraph()) assert_equal(mg.edges(), g.edges()) def test_lollipop_graph(self): # number of nodes = m1 + m2 # number of edges = number_of_edges(complete_graph(m1)) + m2 for m1, m2 in [(3, 5), (4, 10), (3, 20)]: b=lollipop_graph(m1,m2) assert_equal(number_of_nodes(b), m1+m2) assert_equal(number_of_edges(b), m1*(m1-1)/2 + m2) assert_equal(b.name, 'lollipop_graph(' + str(m1) + ',' + str(m2) + ')') # Raise NetworkXError if m<2 assert_raises(networkx.exception.NetworkXError, lollipop_graph, 1, 20) # Raise NetworkXError if n<0 assert_raises(networkx.exception.NetworkXError, lollipop_graph, 5, -2) # lollipop_graph(2,m) = path_graph(m+2) for m1, m2 in [(2, 5), (2, 10), (2, 20)]: b=lollipop_graph(m1,m2) assert_true(is_isomorphic(b, path_graph(m2+2))) assert_raises(networkx.exception.NetworkXError, lollipop_graph, m1, m2, create_using=DiGraph()) mb=lollipop_graph(m1, m2, create_using=MultiGraph()) assert_true(mb.edges(), b.edges()) def test_null_graph(self): assert_equal(number_of_nodes(null_graph()), 0) def test_path_graph(self): p=path_graph(0) assert_true(is_isomorphic(p, null_graph())) assert_equal(p.name, 'path_graph(0)') p=path_graph(1) assert_true(is_isomorphic( p, empty_graph(1))) assert_equal(p.name, 'path_graph(1)') p=path_graph(10) assert_true(is_connected(p)) assert_equal(sorted(list(p.degree().values())), [1, 1, 2, 2, 2, 2, 2, 2, 2, 2]) assert_equal(p.order()-1, p.size()) dp=path_graph(3, create_using=DiGraph()) assert_true(dp.has_edge(0,1)) assert_false(dp.has_edge(1,0)) mp=path_graph(10, create_using=MultiGraph()) assert_true(mp.edges()==p.edges()) def test_periodic_grid_2d_graph(self): g=grid_2d_graph(0,0, periodic=True) assert_equal(g.degree(), {}) for m, n, G in [(2, 2, cycle_graph(4)), (1, 7, cycle_graph(7)), (7, 1, cycle_graph(7)), (2, 5, circular_ladder_graph(5)), (5, 2, circular_ladder_graph(5)), (2, 4, cubical_graph()), (4, 2, cubical_graph())]: g=grid_2d_graph(m,n, periodic=True) assert_true(is_isomorphic(g, G)) DG=grid_2d_graph(4, 2, periodic=True, create_using=DiGraph()) assert_equal(DG.succ,g.adj) assert_equal(DG.pred,g.adj) MG=grid_2d_graph(4, 2, periodic=True, create_using=MultiGraph()) assert_equal(MG.edges(),g.edges()) def 
test_star_graph(self): assert_true(is_isomorphic(star_graph(0), empty_graph(1))) assert_true(is_isomorphic(star_graph(1), path_graph(2))) assert_true(is_isomorphic(star_graph(2), path_graph(3))) s=star_graph(10) assert_equal(sorted(list(s.degree().values())), [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 10]) assert_raises(networkx.exception.NetworkXError, star_graph, 10, create_using=DiGraph()) ms=star_graph(10, create_using=MultiGraph()) assert_true(ms.edges()==s.edges()) def test_trivial_graph(self): assert_equal(number_of_nodes(trivial_graph()), 1) def test_wheel_graph(self): for n, G in [(0, null_graph()), (1, empty_graph(1)), (2, path_graph(2)), (3, complete_graph(3)), (4, complete_graph(4))]: g=wheel_graph(n) assert_true(is_isomorphic( g, G)) assert_equal(g.name, 'wheel_graph(4)') g=wheel_graph(10) assert_equal(sorted(list(g.degree().values())), [3, 3, 3, 3, 3, 3, 3, 3, 3, 9]) assert_raises(networkx.exception.NetworkXError, wheel_graph, 10, create_using=DiGraph()) mg=wheel_graph(10, create_using=MultiGraph()) assert_equal(mg.edges(), g.edges()) networkx-1.8.1/networkx/generators/tests/test_intersection.py0000664000175000017500000000120712177456333024610 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestIntersectionGraph(): def test_random_intersection_graph(self): G=nx.uniform_random_intersection_graph(10,5,0.5) assert_equal(len(G),10) def test_k_random_intersection_graph(self): G=nx.k_random_intersection_graph(10,5,2) assert_equal(len(G),10) def test_general_random_intersection_graph(self): G=nx.general_random_intersection_graph(10,5,[0.1,0.2,0.2,0.1,0.1]) assert_equal(len(G),10) assert_raises(ValueError, nx.general_random_intersection_graph,10,5, [0.1,0.2,0.2,0.1]) networkx-1.8.1/networkx/generators/tests/test_threshold.py0000664000175000017500000001474012177456333024104 0ustar aricaric00000000000000#!/usr/bin/env python """Threshold Graphs ================ """ from nose.tools import * from nose import SkipTest from nose.plugins.attrib import attr import networkx as nx import networkx.generators.threshold as nxt from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic cnlti = nx.convert_node_labels_to_integers class TestGeneratorThreshold(): def test_threshold_sequence_graph_test(self): G=nx.star_graph(10) assert_true(nxt.is_threshold_graph(G)) assert_true(nxt.is_threshold_sequence(list(G.degree().values()))) G=nx.complete_graph(10) assert_true(nxt.is_threshold_graph(G)) assert_true(nxt.is_threshold_sequence(list(G.degree().values()))) deg=[3,2,2,1,1,1] assert_false(nxt.is_threshold_sequence(deg)) deg=[3,2,2,1] assert_true(nxt.is_threshold_sequence(deg)) G=nx.generators.havel_hakimi_graph(deg) assert_true(nxt.is_threshold_graph(G)) def test_creation_sequences(self): deg=[3,2,2,1] G=nx.generators.havel_hakimi_graph(deg) cs0=nxt.creation_sequence(deg) H0=nxt.threshold_graph(cs0) assert_equal(''.join(cs0), 'ddid') cs1=nxt.creation_sequence(deg, with_labels=True) H1=nxt.threshold_graph(cs1) assert_equal(cs1, [(1, 'd'), (2, 'd'), (3, 'i'), (0, 'd')]) cs2=nxt.creation_sequence(deg, compact=True) H2=nxt.threshold_graph(cs2) assert_equal(cs2, [2, 1, 1]) assert_equal(''.join(nxt.uncompact(cs2)), 'ddid') assert_true(graph_could_be_isomorphic(H0,G)) assert_true(graph_could_be_isomorphic(H0,H1)) assert_true(graph_could_be_isomorphic(H0,H2)) def test_shortest_path(self): deg=[3,2,2,1] G=nx.generators.havel_hakimi_graph(deg) cs1=nxt.creation_sequence(deg, with_labels=True) for n, m in [(3, 0), (0, 3), (0, 2), (0, 1), (1, 3), (3, 
1), (1, 2), (2, 3)]: assert_equal(nxt.shortest_path(cs1,n,m), nx.shortest_path(G, n, m)) spl=nxt.shortest_path_length(cs1,3) spl2=nxt.shortest_path_length([ t for v,t in cs1],2) assert_equal(spl, spl2) spld={} for j,pl in enumerate(spl): n=cs1[j][0] spld[n]=pl assert_equal(spld, nx.single_source_shortest_path_length(G, 3)) def test_weights_thresholds(self): wseq=[3,4,3,3,5,6,5,4,5,6] cs=nxt.weights_to_creation_sequence(wseq,threshold=10) wseq=nxt.creation_sequence_to_weights(cs) cs2=nxt.weights_to_creation_sequence(wseq) assert_equal(cs, cs2) wseq=nxt.creation_sequence_to_weights(nxt.uncompact([3,1,2,3,3,2,3])) assert_equal(wseq, [s*0.125 for s in [4,4,4,3,5,5,2,2,2,6,6,6,1,1,7,7,7]]) wseq=nxt.creation_sequence_to_weights([3,1,2,3,3,2,3]) assert_equal(wseq, [s*0.125 for s in [4,4,4,3,5,5,2,2,2,6,6,6,1,1,7,7,7]]) wseq=nxt.creation_sequence_to_weights(list(enumerate('ddidiiidididi'))) assert_equal(wseq, [s*0.1 for s in [5,5,4,6,3,3,3,7,2,8,1,9,0]]) wseq=nxt.creation_sequence_to_weights('ddidiiidididi') assert_equal(wseq, [s*0.1 for s in [5,5,4,6,3,3,3,7,2,8,1,9,0]]) wseq=nxt.creation_sequence_to_weights('ddidiiidididid') ws=[s/float(12) for s in [6,6,5,7,4,4,4,8,3,9,2,10,1,11]] assert_true(sum([abs(c-d) for c,d in zip(wseq,ws)]) < 1e-14) def test_finding_routines(self): G=nx.Graph({1:[2],2:[3],3:[4],4:[5],5:[6]}) G.add_edge(2,4) G.add_edge(2,5) G.add_edge(2,7) G.add_edge(3,6) G.add_edge(4,6) # Alternating 4 cycle assert_equal(nxt.find_alternating_4_cycle(G), [1, 2, 3, 6]) # Threshold graph TG=nxt.find_threshold_graph(G) assert_true(nxt.is_threshold_graph(TG)) assert_equal(sorted(TG.nodes()), [1, 2, 3, 4, 5, 7]) cs=nxt.creation_sequence(TG.degree(),with_labels=True) assert_equal(nxt.find_creation_sequence(G), cs) def test_fast_versions_properties_threshold_graphs(self): cs='ddiiddid' G=nxt.threshold_graph(cs) assert_equal(nxt.density('ddiiddid'), nx.density(G)) assert_equal(sorted(nxt.degree_sequence(cs)), sorted(G.degree().values())) ts=nxt.triangle_sequence(cs) assert_equal(ts, list(nx.triangles(G).values())) assert_equal(sum(ts) // 3, nxt.triangles(cs)) c1=nxt.cluster_sequence(cs) c2=list(nx.clustering(G).values()) assert_almost_equal(sum([abs(c-d) for c,d in zip(c1,c2)]), 0) b1=nx.betweenness_centrality(G).values() b2=nxt.betweenness_sequence(cs) assert_true(sum([abs(c-d) for c,d in zip(b1,b2)]) < 1e-14) assert_equal(nxt.eigenvalues(cs), [0, 1, 3, 3, 5, 7, 7, 8]) # Degree Correlation assert_true(abs(nxt.degree_correlation(cs)+0.593038821954) < 1e-12) assert_equal(nxt.degree_correlation('diiiddi'), -0.8) assert_equal(nxt.degree_correlation('did'), -1.0) assert_equal(nxt.degree_correlation('ddd'), 1.0) assert_equal(nxt.eigenvalues('dddiii'), [0, 0, 0, 0, 3, 3]) assert_equal(nxt.eigenvalues('dddiiid'), [0, 1, 1, 1, 4, 4, 7]) def test_tg_creation_routines(self): s=nxt.left_d_threshold_sequence(5,7) s=nxt.right_d_threshold_sequence(5,7) s1=nxt.swap_d(s,1.0,1.0) @attr('numpy') def test_eigenvectors(self): try: import numpy as N eigenval=N.linalg.eigvals except ImportError: raise SkipTest('NumPy not available.') cs='ddiiddid' G=nxt.threshold_graph(cs) (tgeval,tgevec)=nxt.eigenvectors(cs) dot=N.dot assert_equal([ abs(dot(lv,lv)-1.0)<1e-9 for lv in tgevec ], [True]*8) lapl=nx.laplacian_matrix(G) # tgev=[ dot(lv,dot(lapl,lv)) for lv in tgevec ] # assert_true(sum([abs(c-d) for c,d in zip(tgev,tgeval)]) < 1e-9) # tgev.sort() # lev=list(eigenval(lapl)) # lev.sort() # assert_true(sum([abs(c-d) for c,d in zip(tgev,lev)]) < 1e-9) def test_create_using(self): cs='ddiiddid' G=nxt.threshold_graph(cs) 
assert_raises(nx.exception.NetworkXError, nxt.threshold_graph, cs, create_using=nx.DiGraph()) MG=nxt.threshold_graph(cs,create_using=nx.MultiGraph()) assert_equal(MG.edges(), G.edges()) networkx-1.8.1/networkx/generators/tests/test_degree_seq.py0000664000175000017500000001314612177456333024212 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx from networkx import * from networkx.generators.degree_seq import * from networkx.utils import uniform_sequence,powerlaw_sequence def test_configuration_model_empty(): # empty graph has empty degree sequence deg_seq=[] G=configuration_model(deg_seq) assert_equal(G.degree(), {}) def test_configuration_model(): deg_seq=[5,3,3,3,3,2,2,2,1,1,1] G=configuration_model(deg_seq,seed=12345678) assert_equal(sorted(G.degree().values(),reverse=True), [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]) assert_equal(sorted(G.degree(range(len(deg_seq))).values(), reverse=True), [5, 3, 3, 3, 3, 2, 2, 2, 1, 1, 1]) # test that fixed seed delivers the same graph deg_seq=[3,3,3,3,3,3,3,3,3,3,3,3] G1=configuration_model(deg_seq,seed=1000) G2=configuration_model(deg_seq,seed=1000) assert_true(is_isomorphic(G1,G2)) G1=configuration_model(deg_seq,seed=10) G2=configuration_model(deg_seq,seed=10) assert_true(is_isomorphic(G1,G2)) @raises(NetworkXError) def test_configuation_raise(): z=[5,3,3,3,3,2,2,2,1,1,1] G = configuration_model(z, create_using=DiGraph()) @raises(NetworkXError) def test_configuation_raise_odd(): z=[5,3,3,3,3,2,2,2,1,1] G = configuration_model(z, create_using=DiGraph()) @raises(NetworkXError) def test_directed_configuation_raise_unequal(): zin = [5,3,3,3,3,2,2,2,1,1] zout = [5,3,3,3,3,2,2,2,1,2] G = directed_configuration_model(zin, zout) def test_directed_configuation_mode(): G = directed_configuration_model([],[],seed=0) assert_equal(len(G),0) def test_expected_degree_graph_empty(): # empty graph has empty degree sequence deg_seq=[] G=expected_degree_graph(deg_seq) assert_equal(G.degree(), {}) def test_expected_degree_graph(): # test that fixed seed delivers the same graph deg_seq=[3,3,3,3,3,3,3,3,3,3,3,3] G1=expected_degree_graph(deg_seq,seed=1000) G2=expected_degree_graph(deg_seq,seed=1000) assert_true(is_isomorphic(G1,G2)) G1=expected_degree_graph(deg_seq,seed=10) G2=expected_degree_graph(deg_seq,seed=10) assert_true(is_isomorphic(G1,G2)) def test_expected_degree_graph_selfloops(): deg_seq=[3,3,3,3,3,3,3,3,3,3,3,3] G1=expected_degree_graph(deg_seq,seed=1000, selfloops=False) G2=expected_degree_graph(deg_seq,seed=1000, selfloops=False) assert_true(is_isomorphic(G1,G2)) def test_expected_degree_graph_skew(): deg_seq=[10,2,2,2,2] G1=expected_degree_graph(deg_seq,seed=1000) G2=expected_degree_graph(deg_seq,seed=1000) assert_true(is_isomorphic(G1,G2)) def test_havel_hakimi_construction(): G = havel_hakimi_graph([]) assert_equal(len(G),0) z=[1000,3,3,3,3,2,2,2,1,1,1] assert_raises(networkx.exception.NetworkXError, havel_hakimi_graph, z) z=["A",3,3,3,3,2,2,2,1,1,1] assert_raises(networkx.exception.NetworkXError, havel_hakimi_graph, z) z=[5,4,3,3,3,2,2,2] G=havel_hakimi_graph(z) G=configuration_model(z) z=[6,5,4,4,2,1,1,1] assert_raises(networkx.exception.NetworkXError, havel_hakimi_graph, z) z=[10,3,3,3,3,2,2,2,2,2,2] G=havel_hakimi_graph(z) assert_raises(networkx.exception.NetworkXError, havel_hakimi_graph, z, create_using=DiGraph()) def test_directed_havel_hakimi(): # Test range of valid directed degree sequences n, r = 100, 10 p = 1.0 / r for i in range(r): G1 = nx.erdos_renyi_graph(n,p*(i+1),None,True) din = 
list(G1.in_degree().values()) dout = list(G1.out_degree().values()) G2 = nx.directed_havel_hakimi_graph(din, dout) assert_true(din == list(G2.in_degree().values())) assert_true(dout == list(G2.out_degree().values())) # Test non-graphical sequence dout = [1000,3,3,3,3,2,2,2,1,1,1] din=[103,102,102,102,102,102,102,102,102,102] assert_raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) # Test valid sequences dout=[1, 1, 1, 1, 1, 2, 2, 2, 3, 4] din=[2, 2, 2, 2, 2, 2, 2, 2, 0, 2] G2 = nx.directed_havel_hakimi_graph(din, dout) assert_true(din == list(G2.in_degree().values())) assert_true(dout == list(G2.out_degree().values())) # Test unequal sums din=[2, 2, 2, 2, 2, 2, 2, 2, 2, 2] assert_raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) # Test for negative values din=[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, -2] assert_raises(nx.exception.NetworkXError, nx.directed_havel_hakimi_graph, din, dout) def test_degree_sequence_tree(): z=[1, 1, 1, 1, 1, 2, 2, 2, 3, 4] G=degree_sequence_tree(z) assert_true(len(G.nodes())==len(z)) assert_true(len(G.edges())==sum(z)/2) assert_raises(networkx.exception.NetworkXError, degree_sequence_tree, z, create_using=DiGraph()) z=[1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] assert_raises(networkx.exception.NetworkXError, degree_sequence_tree, z) def test_random_degree_sequence_graph(): d=[1,2,2,3] G = nx.random_degree_sequence_graph(d) assert_equal(d, list(G.degree().values())) def test_random_degree_sequence_graph_raise(): z=[1, 1, 1, 1, 1, 1, 2, 2, 2, 3, 4] assert_raises(networkx.exception.NetworkXUnfeasible, random_degree_sequence_graph, z) def test_random_degree_sequence_large(): G = nx.fast_gnp_random_graph(100,0.1) d = G.degree().values() G = nx.random_degree_sequence_graph(d, seed=0) assert_equal(sorted(d), sorted(list(G.degree().values()))) networkx-1.8.1/networkx/generators/tests/test_geometric.py0000664000175000017500000000201412177456333024055 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx as nx class TestGeneratorsGeometric(): def test_random_geometric_graph(self): G=nx.random_geometric_graph(50,0.25) assert_equal(len(G),50) def test_geographical_threshold_graph(self): G=nx.geographical_threshold_graph(50,100) assert_equal(len(G),50) def test_waxman_graph(self): G=nx.waxman_graph(50,0.5,0.1) assert_equal(len(G),50) G=nx.waxman_graph(50,0.5,0.1,L=1) assert_equal(len(G),50) def test_naviable_small_world(self): G = nx.navigable_small_world_graph(5,p=1,q=0) gg = nx.grid_2d_graph(5,5).to_directed() assert_true(nx.is_isomorphic(G,gg)) G = nx.navigable_small_world_graph(5,p=1,q=0,dim=3) gg = nx.grid_graph([5,5,5]).to_directed() assert_true(nx.is_isomorphic(G,gg)) G = nx.navigable_small_world_graph(5,p=1,q=0,dim=1) gg = nx.grid_graph([5]).to_directed() assert_true(nx.is_isomorphic(G,gg)) networkx-1.8.1/networkx/generators/tests/test_atlas.py0000664000175000017500000000316612177456333023214 0ustar aricaric00000000000000from nose.tools import * import networkx as nx class TestAtlas(object): def setUp(self): self.GAG=nx.graph_atlas_g() def test_sizes(self): G=self.GAG[0] assert_equal(G.number_of_nodes(),0) assert_equal(G.number_of_edges(),0) G=self.GAG[7] assert_equal(G.number_of_nodes(),3) assert_equal(G.number_of_edges(),3) def test_names(self): i=0 for g in self.GAG: name=g.name assert_equal(int(name[1:]),i) i+=1 def test_monotone_nodes(self): # check for monotone increasing number of nodes previous=self.GAG[0] for g in self.GAG: assert_false(len(g)-len(previous) > 1) previous=g.copy() def 
test_monotone_nodes(self): # check for monotone increasing number of edges # (for fixed number of nodes) previous=self.GAG[0] for g in self.GAG: if len(g)==len(previous): assert_false(g.size()-previous.size() > 1) previous=g.copy() def test_monotone_degree_sequence(self): # check for monotone increasing degree sequence # (for fixed number f nodes and edges) # note that 111223 < 112222 previous=self.GAG[0] for g in self.GAG: if len(g)==0: continue if len(g)==len(previous) & g.size()==previous.size(): deg_seq=sorted(g.degree().values()) previous_deg_seq=sorted(previous.degree().values()) assert_true(previous_deg_seq < deg_seq) previous=g.copy() networkx-1.8.1/networkx/generators/tests/test_ego.py0000664000175000017500000000250312177456333022654 0ustar aricaric00000000000000#!/usr/bin/env python """ ego graph --------- """ from nose.tools import assert_true, assert_equal import networkx as nx class TestGeneratorEgo(): def test_ego(self): G=nx.star_graph(3) H=nx.ego_graph(G,0) assert_true(nx.is_isomorphic(G,H)) G.add_edge(1,11) G.add_edge(2,22) G.add_edge(3,33) H=nx.ego_graph(G,0) assert_true(nx.is_isomorphic(nx.star_graph(3),H)) G=nx.path_graph(3) H=nx.ego_graph(G,0) assert_equal(H.edges(), [(0, 1)]) H=nx.ego_graph(G,0,undirected=True) assert_equal(H.edges(), [(0, 1)]) H=nx.ego_graph(G,0,center=False) assert_equal(H.edges(), []) def test_ego_distance(self): G=nx.Graph() G.add_edge(0,1,weight=2,distance=1) G.add_edge(1,2,weight=2,distance=2) G.add_edge(2,3,weight=2,distance=1) assert_equal(sorted(nx.ego_graph(G,0,radius=3).nodes()),[0,1,2,3]) eg=nx.ego_graph(G,0,radius=3,distance='weight') assert_equal(sorted(eg.nodes()),[0,1]) eg=nx.ego_graph(G,0,radius=3,distance='weight',undirected=True) assert_equal(sorted(eg.nodes()),[0,1]) eg=nx.ego_graph(G,0,radius=3,distance='distance') assert_equal(sorted(eg.nodes()),[0,1,2]) networkx-1.8.1/networkx/generators/tests/test_stochastic.py0000664000175000017500000000177112177456333024254 0ustar aricaric00000000000000from nose.tools import assert_true, assert_equal, raises import networkx as nx def test_stochastic(): G=nx.DiGraph() G.add_edge(0,1) G.add_edge(0,2) S=nx.stochastic_graph(G) assert_true(nx.is_isomorphic(G,S)) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) S=nx.stochastic_graph(G,copy=True) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) def test_stochastic_ints(): G=nx.DiGraph() G.add_edge(0,1,weight=1) G.add_edge(0,2,weight=1) S=nx.stochastic_graph(G) assert_equal(sorted(S.edges(data=True)), [(0, 1, {'weight': 0.5}), (0, 2, {'weight': 0.5})]) @raises(nx.NetworkXError) def test_stochastic_graph_input(): S = nx.stochastic_graph(nx.Graph()) @raises(nx.NetworkXError) def test_stochastic_multigraph_input(): S = nx.stochastic_graph(nx.MultiGraph()) networkx-1.8.1/networkx/generators/tests/test_hybrid.py0000664000175000017500000000145312177456333023366 0ustar aricaric00000000000000from nose.tools import * import networkx as nx def test_2d_grid_graph(): # FC article claims 2d grid graph of size n is (3,3)-connected # and (5,9)-connected, but I don't think it is (5,9)-connected G=nx.grid_2d_graph(8,8,periodic=True) assert_true(nx.is_kl_connected(G,3,3)) assert_false(nx.is_kl_connected(G,5,9)) (H,graphOK)=nx.kl_connected_subgraph(G,5,9,same_as_graph=True) assert_false(graphOK) def test_small_graph(): G=nx.Graph() G.add_edge(1,2) G.add_edge(1,3) G.add_edge(2,3) assert_true(nx.is_kl_connected(G,2,2)) H=nx.kl_connected_subgraph(G,2,2) 
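# the triangle is (2,2)-connected: for every edge there is, besides the edge
# itself, a second path of length two through the remaining node, so no edge
# gets removed and the returned subgraph equals G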
(H,graphOK)=nx.kl_connected_subgraph(G,2,2, low_memory=True, same_as_graph=True) assert_true(graphOK) networkx-1.8.1/networkx/generators/tests/test_bipartite.py0000664000175000017500000001410612177456333024067 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from networkx import * from networkx.generators.bipartite import * """Generators - Bipartite ---------------------- """ class TestGeneratorsBipartite(): def test_configuration_model(self): aseq=[3,3,3,3] bseq=[2,2,2,2,2] assert_raises(networkx.exception.NetworkXError, bipartite_configuration_model, aseq, bseq) aseq=[3,3,3,3] bseq=[2,2,2,2,2,2] G=bipartite_configuration_model(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) aseq=[2,2,2,2,2,2] bseq=[3,3,3,3] G=bipartite_configuration_model(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) aseq=[2,2,2,1,1,1] bseq=[3,3,3] G=bipartite_configuration_model(aseq,bseq) assert_equal(sorted(G.degree().values()), [1, 1, 1, 2, 2, 2, 3, 3, 3]) GU=project(Graph(G),range(len(aseq))) assert_equal(GU.number_of_nodes(), 6) GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq))) assert_equal(GD.number_of_nodes(), 3) assert_raises(networkx.exception.NetworkXError, bipartite_configuration_model, aseq, bseq, create_using=DiGraph()) def test_havel_hakimi_graph(self): aseq=[3,3,3,3] bseq=[2,2,2,2,2] assert_raises(networkx.exception.NetworkXError, bipartite_havel_hakimi_graph, aseq, bseq) bseq=[2,2,2,2,2,2] G=bipartite_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) aseq=[2,2,2,2,2,2] bseq=[3,3,3,3] G=bipartite_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) GU=project(Graph(G),range(len(aseq))) assert_equal(GU.number_of_nodes(), 6) GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq))) assert_equal(GD.number_of_nodes(), 4) assert_raises(networkx.exception.NetworkXError, bipartite_havel_hakimi_graph, aseq, bseq, create_using=DiGraph()) def test_reverse_havel_hakimi_graph(self): aseq=[3,3,3,3] bseq=[2,2,2,2,2] assert_raises(networkx.exception.NetworkXError, bipartite_reverse_havel_hakimi_graph, aseq, bseq) bseq=[2,2,2,2,2,2] G=bipartite_reverse_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) aseq=[2,2,2,2,2,2] bseq=[3,3,3,3] G=bipartite_reverse_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) aseq=[2,2,2,1,1,1] bseq=[3,3,3] G=bipartite_reverse_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [1, 1, 1, 2, 2, 2, 3, 3, 3]) GU=project(Graph(G),range(len(aseq))) assert_equal(GU.number_of_nodes(), 6) GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq))) assert_equal(GD.number_of_nodes(), 3) assert_raises(networkx.exception.NetworkXError, bipartite_reverse_havel_hakimi_graph, aseq, bseq, create_using=DiGraph()) def test_alternating_havel_hakimi_graph(self): aseq=[3,3,3,3] bseq=[2,2,2,2,2] assert_raises(networkx.exception.NetworkXError, bipartite_alternating_havel_hakimi_graph, aseq, bseq) bseq=[2,2,2,2,2,2] G=bipartite_alternating_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) aseq=[2,2,2,2,2,2] bseq=[3,3,3,3] G=bipartite_alternating_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]) aseq=[2,2,2,1,1,1] bseq=[3,3,3] 
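# the degree sums now agree (2+2+2+1+1+1 == 3+3+3 == 9); unequal sums raise
# NetworkXError, as checked at the start of this test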
G=bipartite_alternating_havel_hakimi_graph(aseq,bseq) assert_equal(sorted(G.degree().values()), [1, 1, 1, 2, 2, 2, 3, 3, 3]) GU=project(Graph(G),range(len(aseq))) assert_equal(GU.number_of_nodes(), 6) GD=project(Graph(G),range(len(aseq),len(aseq)+len(bseq))) assert_equal(GD.number_of_nodes(), 3) assert_raises(networkx.exception.NetworkXError, bipartite_alternating_havel_hakimi_graph, aseq, bseq, create_using=DiGraph()) def test_preferential_attachment(self): aseq=[3,2,1,1] G=bipartite_preferential_attachment_graph(aseq,0.5) assert_raises(networkx.exception.NetworkXError, bipartite_preferential_attachment_graph, aseq, 0.5, create_using=DiGraph()) def test_bipartite_random_graph(self): n=10 m=20 G=bipartite_random_graph(n,m,0.9) assert_equal(len(G),30) assert_true(is_bipartite(G)) X,Y=nx.algorithms.bipartite.sets(G) assert_equal(set(range(n)),X) assert_equal(set(range(n,n+m)),Y) def test_directed_bipartite_random_graph(self): n=10 m=20 G=bipartite_random_graph(n,m,0.9,directed=True) assert_equal(len(G),30) assert_true(is_bipartite(G)) X,Y=nx.algorithms.bipartite.sets(G) assert_equal(set(range(n)),X) assert_equal(set(range(n,n+m)),Y) def test_bipartite_gnmk_random_graph(self): n = 10 m = 20 edges = 100 G = bipartite_gnmk_random_graph(n, m, edges) assert_equal(len(G),30) assert_true(is_bipartite(G)) X,Y=nx.algorithms.bipartite.sets(G) print(X) assert_equal(set(range(n)),X) assert_equal(set(range(n,n+m)),Y) assert_equal(edges, len(G.edges())) networkx-1.8.1/networkx/generators/tests/test_small.py0000664000175000017500000001437112177456333023220 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from networkx import * from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic is_isomorphic=graph_could_be_isomorphic """Generators - Small ===================== Some small graphs """ null=null_graph() class TestGeneratorsSmall(): def test_make_small_graph(self): d=["adjacencylist","Bull Graph",5,[[2,3],[1,3,4],[1,2,5],[2],[3]]] G=make_small_graph(d) assert_true(is_isomorphic(G, bull_graph())) def test__LCF_graph(self): # If n<=0, then return the null_graph G=LCF_graph(-10,[1,2],100) assert_true(is_isomorphic(G,null)) G=LCF_graph(0,[1,2],3) assert_true(is_isomorphic(G,null)) G=LCF_graph(0,[1,2],10) assert_true(is_isomorphic(G,null)) # Test that LCF(n,[],0) == cycle_graph(n) for a, b, c in [(5, [], 0), (10, [], 0), (5, [], 1), (10, [], 10)]: G=LCF_graph(a, b, c) assert_true(is_isomorphic(G,cycle_graph(a))) # Generate the utility graph K_{3,3} G=LCF_graph(6,[3,-3],3) utility_graph=complete_bipartite_graph(3,3) assert_true(is_isomorphic(G, utility_graph)) def test_properties_named_small_graphs(self): G=bull_graph() assert_equal(G.number_of_nodes(), 5) assert_equal(G.number_of_edges(), 5) assert_equal(sorted(G.degree().values()), [1, 1, 2, 3, 3]) assert_equal(diameter(G), 3) assert_equal(radius(G), 2) G=chvatal_graph() assert_equal(G.number_of_nodes(), 12) assert_equal(G.number_of_edges(), 24) assert_equal(list(G.degree().values()), 12 * [4]) assert_equal(diameter(G), 2) assert_equal(radius(G), 2) G=cubical_graph() assert_equal(G.number_of_nodes(), 8) assert_equal(G.number_of_edges(), 12) assert_equal(list(G.degree().values()), 8*[3]) assert_equal(diameter(G), 3) assert_equal(radius(G), 3) G=desargues_graph() assert_equal(G.number_of_nodes(), 20) assert_equal(G.number_of_edges(), 30) assert_equal(list(G.degree().values()), 20*[3]) G=diamond_graph() assert_equal(G.number_of_nodes(), 4) assert_equal(sorted(G.degree().values()), [2, 2, 3, 3]) 
assert_equal(diameter(G), 2) assert_equal(radius(G), 1) G=dodecahedral_graph() assert_equal(G.number_of_nodes(), 20) assert_equal(G.number_of_edges(), 30) assert_equal(list(G.degree().values()), 20*[3]) assert_equal(diameter(G), 5) assert_equal(radius(G), 5) G=frucht_graph() assert_equal(G.number_of_nodes(), 12) assert_equal(G.number_of_edges(), 18) assert_equal(list(G.degree().values()), 12*[3]) assert_equal(diameter(G), 4) assert_equal(radius(G), 3) G=heawood_graph() assert_equal(G.number_of_nodes(), 14) assert_equal(G.number_of_edges(), 21) assert_equal(list(G.degree().values()), 14*[3]) assert_equal(diameter(G), 3) assert_equal(radius(G), 3) G=house_graph() assert_equal(G.number_of_nodes(), 5) assert_equal(G.number_of_edges(), 6) assert_equal(sorted(G.degree().values()), [2, 2, 2, 3, 3]) assert_equal(diameter(G), 2) assert_equal(radius(G), 2) G=house_x_graph() assert_equal(G.number_of_nodes(), 5) assert_equal(G.number_of_edges(), 8) assert_equal(sorted(G.degree().values()), [2, 3, 3, 4, 4]) assert_equal(diameter(G), 2) assert_equal(radius(G), 1) G=icosahedral_graph() assert_equal(G.number_of_nodes(), 12) assert_equal(G.number_of_edges(), 30) assert_equal(list(G.degree().values()), [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5]) assert_equal(diameter(G), 3) assert_equal(radius(G), 3) G=krackhardt_kite_graph() assert_equal(G.number_of_nodes(), 10) assert_equal(G.number_of_edges(), 18) assert_equal(sorted(G.degree().values()), [1, 2, 3, 3, 3, 4, 4, 5, 5, 6]) G=moebius_kantor_graph() assert_equal(G.number_of_nodes(), 16) assert_equal(G.number_of_edges(), 24) assert_equal(list(G.degree().values()), 16*[3]) assert_equal(diameter(G), 4) G=octahedral_graph() assert_equal(G.number_of_nodes(), 6) assert_equal(G.number_of_edges(), 12) assert_equal(list(G.degree().values()), 6*[4]) assert_equal(diameter(G), 2) assert_equal(radius(G), 2) G=pappus_graph() assert_equal(G.number_of_nodes(), 18) assert_equal(G.number_of_edges(), 27) assert_equal(list(G.degree().values()), 18*[3]) assert_equal(diameter(G), 4) G=petersen_graph() assert_equal(G.number_of_nodes(), 10) assert_equal(G.number_of_edges(), 15) assert_equal(list(G.degree().values()), 10*[3]) assert_equal(diameter(G), 2) assert_equal(radius(G), 2) G=sedgewick_maze_graph() assert_equal(G.number_of_nodes(), 8) assert_equal(G.number_of_edges(), 10) assert_equal(sorted(G.degree().values()), [1, 2, 2, 2, 3, 3, 3, 4]) G=tetrahedral_graph() assert_equal(G.number_of_nodes(), 4) assert_equal(G.number_of_edges(), 6) assert_equal(list(G.degree().values()), [3, 3, 3, 3]) assert_equal(diameter(G), 1) assert_equal(radius(G), 1) G=truncated_cube_graph() assert_equal(G.number_of_nodes(), 24) assert_equal(G.number_of_edges(), 36) assert_equal(list(G.degree().values()), 24*[3]) G=truncated_tetrahedron_graph() assert_equal(G.number_of_nodes(), 12) assert_equal(G.number_of_edges(), 18) assert_equal(list(G.degree().values()), 12*[3]) G=tutte_graph() assert_equal(G.number_of_nodes(), 46) assert_equal(G.number_of_edges(), 69) assert_equal(list(G.degree().values()), 46*[3]) # Test create_using with directed or multigraphs on small graphs assert_raises(networkx.exception.NetworkXError, tutte_graph, create_using=DiGraph()) MG=tutte_graph(create_using=MultiGraph()) assert_equal(MG.edges(), G.edges()) networkx-1.8.1/networkx/generators/tests/test_line.py0000664000175000017500000000141712177456333023034 0ustar aricaric00000000000000#!/usr/bin/env python """line graph ---------- """ import networkx as nx from nose.tools import * class TestGeneratorLine(): def test_line(self): 
G=nx.star_graph(5) L=nx.line_graph(G) assert_true(nx.is_isomorphic(L,nx.complete_graph(5))) G=nx.path_graph(5) L=nx.line_graph(G) assert_true(nx.is_isomorphic(L,nx.path_graph(4))) G=nx.cycle_graph(5) L=nx.line_graph(G) assert_true(nx.is_isomorphic(L,G)) G=nx.DiGraph() G.add_edges_from([(0,1),(0,2),(0,3)]) L=nx.line_graph(G) assert_equal(L.adj, {}) G=nx.DiGraph() G.add_edges_from([(0,1),(1,2),(2,3)]) L=nx.line_graph(G) assert_equal(sorted(L.edges()), [((0, 1), (1, 2)), ((1, 2), (2, 3))]) networkx-1.8.1/networkx/generators/tests/test_random_graphs.py0000664000175000017500000000757512177456333024744 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * from networkx import * from networkx.generators.random_graphs import * class TestGeneratorsRandom(): def smoke_test_random_graph(self): seed = 42 G=gnp_random_graph(100,0.25,seed) G=binomial_graph(100,0.25,seed) G=erdos_renyi_graph(100,0.25,seed) G=fast_gnp_random_graph(100,0.25,seed) G=gnm_random_graph(100,20,seed) G=dense_gnm_random_graph(100,20,seed) G=watts_strogatz_graph(10,2,0.25,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=connected_watts_strogatz_graph(10,2,0.1,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=watts_strogatz_graph(10,4,0.25,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 20) G=newman_watts_strogatz_graph(10,2,0.0,seed) assert_equal(len(G), 10) assert_equal(G.number_of_edges(), 10) G=newman_watts_strogatz_graph(10,4,0.25,seed) assert_equal(len(G), 10) assert_true(G.number_of_edges() >= 20) G=barabasi_albert_graph(100,1,seed) G=barabasi_albert_graph(100,3,seed) assert_equal(G.number_of_edges(),(97*3)) G=powerlaw_cluster_graph(100,1,1.0,seed) G=powerlaw_cluster_graph(100,3,0.0,seed) assert_equal(G.number_of_edges(),(97*3)) G=random_regular_graph(10,20,seed) assert_raises(networkx.exception.NetworkXError, random_regular_graph, 3, 21) constructor=[(10,20,0.8),(20,40,0.8)] G=random_shell_graph(constructor,seed) G=nx.random_lobster(10,0.1,0.5,seed) def test_gnp(self): G=gnp_random_graph(10,0.1) assert_equal(len(G),10) G=gnp_random_graph(10,0.1,seed=42) assert_equal(len(G),10) G=gnp_random_graph(10,1.1) assert_equal(len(G),10) assert_equal(len(G.edges()),45) G=gnp_random_graph(10,1.1,directed=True) assert_equal(len(G),10) assert_equal(len(G.edges()),90) G=gnp_random_graph(10,-1.1) assert_equal(len(G),10) assert_equal(len(G.edges()),0) G=binomial_graph(10,0.1) assert_equal(len(G),10) G=erdos_renyi_graph(10,0.1) assert_equal(len(G),10) def test_fast_gnp(self): G=fast_gnp_random_graph(10,0.1) assert_equal(len(G),10) G=fast_gnp_random_graph(10,0.1,seed=42) assert_equal(len(G),10) G=fast_gnp_random_graph(10,1.1) assert_equal(len(G),10) assert_equal(len(G.edges()),45) G=fast_gnp_random_graph(10,-1.1) assert_equal(len(G),10) assert_equal(len(G.edges()),0) G=fast_gnp_random_graph(10,0.1,directed=True) assert_true(G.is_directed()) assert_equal(len(G),10) def test_gnm(self): G=gnm_random_graph(10,3) assert_equal(len(G),10) assert_equal(len(G.edges()),3) G=gnm_random_graph(10,3,seed=42) assert_equal(len(G),10) assert_equal(len(G.edges()),3) G=gnm_random_graph(10,100) assert_equal(len(G),10) assert_equal(len(G.edges()),45) G=gnm_random_graph(10,100,directed=True) assert_equal(len(G),10) assert_equal(len(G.edges()),90) G=gnm_random_graph(10,-1.1) assert_equal(len(G),10) assert_equal(len(G.edges()),0) def test_watts_strogatz_big_k(self): assert_raises(networkx.exception.NetworkXError, watts_strogatz_graph, 10, 10, 0.25) 
assert_raises(networkx.exception.NetworkXError, newman_watts_strogatz_graph, 10, 10, 0.25) # could create an infinite loop, now doesn't # infinite loop used to occur when a node has degree n-1 and needs to rewire watts_strogatz_graph(10, 9, 0.25, seed=0) newman_watts_strogatz_graph(10, 9, 0.5, seed=0) networkx-1.8.1/networkx/generators/tests/test_random_clustered.py0000664000175000017500000000166212177456333025441 0ustar aricaric00000000000000#!/usr/bin/env python from nose.tools import * import networkx class TestRandomClusteredGraph: def test_valid(self): node=[1,1,1,2,1,2,0,0] tri=[0,0,0,0,0,1,1,1] joint_degree_sequence=zip(node,tri) G = networkx.random_clustered_graph(joint_degree_sequence) assert_equal(G.number_of_nodes(),8) assert_equal(G.number_of_edges(),7) def test_valid2(self): G = networkx.random_clustered_graph(\ [(1,2),(2,1),(1,1),(1,1),(1,1),(2,0)]) assert_equal(G.number_of_nodes(),6) assert_equal(G.number_of_edges(),10) def test_invalid1(self): assert_raises((TypeError,networkx.NetworkXError), networkx.random_clustered_graph,[[1,1],[2,1],[0,1]]) def test_invalid2(self): assert_raises((TypeError,networkx.NetworkXError), networkx.random_clustered_graph,[[1,1],[1,2],[0,1]]) networkx-1.8.1/networkx/generators/ego.py0000664000175000017500000000433112177456333020454 0ustar aricaric00000000000000""" Ego graph. """ # Copyright (C) 2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = """\n""".join(['Drew Conway ', 'Aric Hagberg ']) __all__ = ['ego_graph'] import networkx as nx def ego_graph(G,n,radius=1,center=True,undirected=False,distance=None): """Returns induced subgraph of neighbors centered at node n within a given radius. Parameters ---------- G : graph A NetworkX Graph or DiGraph n : node A single node radius : number, optional Include all neighbors of distance<=radius from n. center : bool, optional If False, do not include center node in graph undirected : bool, optional If True use both in- and out-neighbors of directed graphs. distance : key, optional Use specified edge data key as distance. For example, setting distance='weight' will use the edge weight to measure the distance from the node n. Notes ----- For directed graphs D this produces the "out" neighborhood or successors. If you want the neighborhood of predecessors first reverse the graph with D.reverse(). If you want both directions use the keyword argument undirected=True. Node, edge, and graph attributes are copied to the returned subgraph. """ if undirected: if distance is not None: sp,_=nx.single_source_dijkstra(G.to_undirected(), n,cutoff=radius, weight=distance) else: sp=nx.single_source_shortest_path_length(G.to_undirected(), n,cutoff=radius) else: if distance is not None: sp,_=nx.single_source_dijkstra(G, n,cutoff=radius, weight=distance) else: sp=nx.single_source_shortest_path_length(G,n,cutoff=radius) H=G.subgraph(sp).copy() if not center: H.remove_node(n) return H networkx-1.8.1/networkx/generators/classic.py0000664000175000017500000003710612177456333021331 0ustar aricaric00000000000000""" Generators for some classic graphs. The typical graph generator is called as follows: >>> G=nx.complete_graph(100) returning the complete graph on n nodes labeled 0,..,99 as a simple graph. Except for empty_graph, all the generators in this module return a Graph class (i.e. a simple, undirected graph). """ # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
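# A usage sketch for the create_using convention shared by most generators in
# this module (illustrative only): an existing graph object passed as
# create_using is cleared and rebuilt in place, so the same instance is
# returned, e.g.
# >>> H = nx.MultiGraph()
# >>> G = nx.cycle_graph(4, create_using=H)   # H is cleared and reused
# >>> G is H
# True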
import itertools __author__ ="""Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)""" __all__ = [ 'balanced_tree', 'barbell_graph', 'complete_graph', 'complete_bipartite_graph', 'circular_ladder_graph', 'cycle_graph', 'dorogovtsev_goltsev_mendes_graph', 'empty_graph', 'full_rary_tree', 'grid_graph', 'grid_2d_graph', 'hypercube_graph', 'ladder_graph', 'lollipop_graph', 'null_graph', 'path_graph', 'star_graph', 'trivial_graph', 'wheel_graph'] #------------------------------------------------------------------- # Some Classic Graphs #------------------------------------------------------------------- import networkx as nx from networkx.utils import is_list_of_ints, flatten def _tree_edges(n,r): # helper function for trees # yields edges in rooted tree at 0 with n nodes and branching ratio r nodes=iter(range(n)) parents=[next(nodes)] # stack of max length r while parents: source=parents.pop(0) for i in range(r): try: target=next(nodes) parents.append(target) yield source,target except StopIteration: break def full_rary_tree(r, n, create_using=None): """Creates a full r-ary tree of n vertices. Sometimes called a k-ary, n-ary, or m-ary tree. "... all non-leaf vertices have exactly r children and all levels are full except for some rightmost position of the bottom level (if a leaf at the bottom level is missing, then so are all of the leaves to its right." [1]_ Parameters ---------- r : int branching factor of the tree n : int Number of nodes in the tree create_using : NetworkX graph type, optional Use specified type to construct graph (default = networkx.Graph) Returns ------- G : networkx Graph An r-ary tree with n nodes References ---------- .. [1] An introduction to data structures and algorithms, James Andrew Storer, Birkhauser Boston 2001, (page 225). """ G=nx.empty_graph(n,create_using) G.add_edges_from(_tree_edges(n,r)) return G def balanced_tree(r, h, create_using=None): """Return the perfectly balanced r-tree of height h. Parameters ---------- r : int Branching factor of the tree h : int Height of the tree create_using : NetworkX graph type, optional Use specified type to construct graph (default = networkx.Graph) Returns ------- G : networkx Graph A tree with n nodes Notes ----- This is the rooted tree where all leaves are at distance h from the root. The root has degree r and all other internal nodes have degree r+1. Node labels are the integers 0 (the root) up to number_of_nodes - 1. Also refered to as a complete r-ary tree. """ # number of nodes is n=1+r+..+r^h if r==1: n=2 else: n = int((1-r**(h+1))/(1-r)) # sum of geometric series r!=1 G=nx.empty_graph(n,create_using) G.add_edges_from(_tree_edges(n,r)) return G return nx.full_rary_tree(r,n,create_using) def barbell_graph(m1,m2,create_using=None): """Return the Barbell Graph: two complete graphs connected by a path. For m1 > 1 and m2 >= 0. Two identical complete graphs K_{m1} form the left and right bells, and are connected by a path P_{m2}. The 2*m1+m2 nodes are numbered 0,...,m1-1 for the left barbell, m1,...,m1+m2-1 for the path, and m1+m2,...,2*m1+m2-1 for the right barbell. The 3 subgraphs are joined via the edges (m1-1,m1) and (m1+m2-1,m1+m2). If m2=0, this is merely two complete graphs joined together. This graph is an extremal example in David Aldous and Jim Fill's etext on Random Walks on Graphs. 
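For example, the counts follow directly from the description above
(an illustrative doctest):

>>> G = nx.barbell_graph(3, 2)
>>> G.number_of_nodes(), G.number_of_edges()
(8, 9)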
""" if create_using is not None and create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") if m1<2: raise nx.NetworkXError(\ "Invalid graph description, m1 should be >=2") if m2<0: raise nx.NetworkXError(\ "Invalid graph description, m2 should be >=0") # left barbell G=complete_graph(m1,create_using) G.name="barbell_graph(%d,%d)"%(m1,m2) # connecting path G.add_nodes_from([v for v in range(m1,m1+m2-1)]) if m2>1: G.add_edges_from([(v,v+1) for v in range(m1,m1+m2-1)]) # right barbell G.add_edges_from( (u,v) for u in range(m1+m2,2*m1+m2) for v in range(u+1,2*m1+m2)) # connect it up G.add_edge(m1-1,m1) if m2>0: G.add_edge(m1+m2-1,m1+m2) return G def complete_graph(n,create_using=None): """ Return the complete graph K_n with n nodes. Node labels are the integers 0 to n-1. """ G=empty_graph(n,create_using) G.name="complete_graph(%d)"%(n) if n>1: if G.is_directed(): edges=itertools.permutations(range(n),2) else: edges=itertools.combinations(range(n),2) G.add_edges_from(edges) return G def complete_bipartite_graph(n1,n2,create_using=None): """Return the complete bipartite graph K_{n1_n2}. Composed of two partitions with n1 nodes in the first and n2 nodes in the second. Each node in the first is connected to each node in the second. Node labels are the integers 0 to n1+n2-1 """ if create_using is not None and create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") G=empty_graph(n1+n2,create_using) G.name="complete_bipartite_graph(%d,%d)"%(n1,n2) for v1 in range(n1): for v2 in range(n2): G.add_edge(v1,n1+v2) return G def circular_ladder_graph(n,create_using=None): """Return the circular ladder graph CL_n of length n. CL_n consists of two concentric n-cycles in which each of the n pairs of concentric nodes are joined by an edge. Node labels are the integers 0 to n-1 """ G=ladder_graph(n,create_using) G.name="circular_ladder_graph(%d)"%n G.add_edge(0,n-1) G.add_edge(n,2*n-1) return G def cycle_graph(n,create_using=None): """Return the cycle graph C_n over n nodes. C_n is the n-path with two end-nodes connected. Node labels are the integers 0 to n-1 If create_using is a DiGraph, the direction is in increasing order. """ G=path_graph(n,create_using) G.name="cycle_graph(%d)"%n if n>1: G.add_edge(n-1,0) return G def dorogovtsev_goltsev_mendes_graph(n,create_using=None): """Return the hierarchically constructed Dorogovtsev-Goltsev-Mendes graph. n is the generation. See: arXiv:/cond-mat/0112143 by Dorogovtsev, Goltsev and Mendes. """ if create_using is not None: if create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") if create_using.is_multigraph(): raise nx.NetworkXError("Multigraph not supported") G=empty_graph(0,create_using) G.name="Dorogovtsev-Goltsev-Mendes Graph" G.add_edge(0,1) if n==0: return G new_node = 2 # next node to be added for i in range(1,n+1): #iterate over number of generations. last_generation_edges = G.edges() number_of_edges_in_last_generation = len(last_generation_edges) for j in range(0,number_of_edges_in_last_generation): G.add_edge(new_node,last_generation_edges[j][0]) G.add_edge(new_node,last_generation_edges[j][1]) new_node += 1 return G def empty_graph(n=0,create_using=None): """Return the empty graph with n nodes and zero edges. 
Node labels are the integers 0 to n-1 For example: >>> G=nx.empty_graph(10) >>> G.number_of_nodes() 10 >>> G.number_of_edges() 0 The variable create_using should point to a "graph"-like object that will be cleaned (nodes and edges will be removed) and refitted as an empty "graph" with n nodes with integer labels. This capability is useful for specifying the class-nature of the resulting empty "graph" (i.e. Graph, DiGraph, MyWeirdGraphClass, etc.). The variable create_using has two main uses: Firstly, the variable create_using can be used to create an empty digraph, network,etc. For example, >>> n=10 >>> G=nx.empty_graph(n,create_using=nx.DiGraph()) will create an empty digraph on n nodes. Secondly, one can pass an existing graph (digraph, pseudograph, etc.) via create_using. For example, if G is an existing graph (resp. digraph, pseudograph, etc.), then empty_graph(n,create_using=G) will empty G (i.e. delete all nodes and edges using G.clear() in base) and then add n nodes and zero edges, and return the modified graph (resp. digraph, pseudograph, etc.). See also create_empty_copy(G). """ if create_using is None: # default empty graph is a simple graph G=nx.Graph() else: G=create_using G.clear() G.add_nodes_from(range(n)) G.name="empty_graph(%d)"%n return G def grid_2d_graph(m,n,periodic=False,create_using=None): """ Return the 2d grid graph of mxn nodes, each connected to its nearest neighbors. Optional argument periodic=True will connect boundary nodes via periodic boundary conditions. """ G=empty_graph(0,create_using) G.name="grid_2d_graph" rows=range(m) columns=range(n) G.add_nodes_from( (i,j) for i in rows for j in columns ) G.add_edges_from( ((i,j),(i-1,j)) for i in rows for j in columns if i>0 ) G.add_edges_from( ((i,j),(i,j-1)) for i in rows for j in columns if j>0 ) if G.is_directed(): G.add_edges_from( ((i,j),(i+1,j)) for i in rows for j in columns if i2: G.add_edges_from( ((i,0),(i,n-1)) for i in rows ) if G.is_directed(): G.add_edges_from( ((i,n-1),(i,0)) for i in rows ) if m>2: G.add_edges_from( ((0,j),(m-1,j)) for j in columns ) if G.is_directed(): G.add_edges_from( ((m-1,j),(0,j)) for j in columns ) G.name="periodic_grid_2d_graph(%d,%d)"%(m,n) return G def grid_graph(dim,periodic=False): """ Return the n-dimensional grid graph. The dimension is the length of the list 'dim' and the size in each dimension is the value of the list element. E.g. G=grid_graph(dim=[2,3]) produces a 2x3 grid graph. If periodic=True then join grid edges with periodic boundary conditions. """ dlabel="%s"%dim if dim==[]: G=empty_graph(0) G.name="grid_graph(%s)"%dim return G if not is_list_of_ints(dim): raise nx.NetworkXError("dim is not a list of integers") if min(dim)<=0: raise nx.NetworkXError(\ "dim is not a list of strictly positive integers") if periodic: func=cycle_graph else: func=path_graph dim=list(dim) current_dim=dim.pop() G=func(current_dim) while len(dim)>0: current_dim=dim.pop() # order matters: copy before it is cleared during the creation of Gnew Gold=G.copy() Gnew=func(current_dim) # explicit: create_using=None # This is so that we get a new graph of Gnew's class. G=nx.cartesian_product(Gnew,Gold) # graph G is done but has labels of the form (1,(2,(3,1))) # so relabel H=nx.relabel_nodes(G, flatten) H.name="grid_graph(%s)"%dlabel return H def hypercube_graph(n): """Return the n-dimensional hypercube. Node labels are the integers 0 to 2**n - 1. 
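For example (an illustrative doctest; the 3-cube has 8 nodes, each of degree 3):

>>> G = nx.hypercube_graph(3)
>>> G.number_of_nodes(), G.number_of_edges()
(8, 12)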
""" dim=n*[2] G=grid_graph(dim) G.name="hypercube_graph_(%d)"%n return G def ladder_graph(n,create_using=None): """Return the Ladder graph of length n. This is two rows of n nodes, with each pair connected by a single edge. Node labels are the integers 0 to 2*n - 1. """ if create_using is not None and create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") G=empty_graph(2*n,create_using) G.name="ladder_graph_(%d)"%n G.add_edges_from([(v,v+1) for v in range(n-1)]) G.add_edges_from([(v,v+1) for v in range(n,2*n-1)]) G.add_edges_from([(v,v+n) for v in range(n)]) return G def lollipop_graph(m,n,create_using=None): """Return the Lollipop Graph; K_m connected to P_n. This is the Barbell Graph without the right barbell. For m>1 and n>=0, the complete graph K_m is connected to the path P_n. The resulting m+n nodes are labelled 0,...,m-1 for the complete graph and m,...,m+n-1 for the path. The 2 subgraphs are joined via the edge (m-1,m). If n=0, this is merely a complete graph. Node labels are the integers 0 to number_of_nodes - 1. (This graph is an extremal example in David Aldous and Jim Fill's etext on Random Walks on Graphs.) """ if create_using is not None and create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") if m<2: raise nx.NetworkXError(\ "Invalid graph description, m should be >=2") if n<0: raise nx.NetworkXError(\ "Invalid graph description, n should be >=0") # the ball G=complete_graph(m,create_using) # the stick G.add_nodes_from([v for v in range(m,m+n)]) if n>1: G.add_edges_from([(v,v+1) for v in range(m,m+n-1)]) # connect ball to stick if m>0: G.add_edge(m-1,m) G.name="lollipop_graph(%d,%d)"%(m,n) return G def null_graph(create_using=None): """ Return the Null graph with no nodes or edges. See empty_graph for the use of create_using. """ G=empty_graph(0,create_using) G.name="null_graph()" return G def path_graph(n,create_using=None): """Return the Path graph P_n of n nodes linearly connected by n-1 edges. Node labels are the integers 0 to n - 1. If create_using is a DiGraph then the edges are directed in increasing order. """ G=empty_graph(n,create_using) G.name="path_graph(%d)"%n G.add_edges_from([(v,v+1) for v in range(n-1)]) return G def star_graph(n,create_using=None): """ Return the Star graph with n+1 nodes: one center node, connected to n outer nodes. Node labels are the integers 0 to n. """ G=complete_bipartite_graph(1,n,create_using) G.name="star_graph(%d)"%n return G def trivial_graph(create_using=None): """ Return the Trivial graph with one node (with integer label 0) and no edges. """ G=empty_graph(1,create_using) G.name="trivial_graph()" return G def wheel_graph(n,create_using=None): """ Return the wheel graph: a single hub node connected to each node of the (n-1)-node cycle graph. Node labels are the integers 0 to n - 1. """ G=star_graph(n-1,create_using) G.name="wheel_graph(%d)"%n G.add_edges_from([(v,v+1) for v in range(1,n-1)]) if n>2: G.add_edge(1,n-1) return G networkx-1.8.1/networkx/generators/small.py0000664000175000017500000003107612177456333021020 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Various small and named graphs, together with some compact generators. """ __author__ ="""Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)""" # Copyright (C) 2004-2008 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
__all__ = ['make_small_graph', 'LCF_graph', 'bull_graph', 'chvatal_graph', 'cubical_graph', 'desargues_graph', 'diamond_graph', 'dodecahedral_graph', 'frucht_graph', 'heawood_graph', 'house_graph', 'house_x_graph', 'icosahedral_graph', 'krackhardt_kite_graph', 'moebius_kantor_graph', 'octahedral_graph', 'pappus_graph', 'petersen_graph', 'sedgewick_maze_graph', 'tetrahedral_graph', 'truncated_cube_graph', 'truncated_tetrahedron_graph', 'tutte_graph'] import networkx as nx from networkx.generators.classic import empty_graph, cycle_graph, path_graph, complete_graph from networkx.exception import NetworkXError #------------------------------------------------------------------------------ # Tools for creating small graphs #------------------------------------------------------------------------------ def make_small_undirected_graph(graph_description, create_using=None): """ Return a small undirected graph described by graph_description. See make_small_graph. """ if create_using is not None and create_using.is_directed(): raise NetworkXError("Directed Graph not supported") return make_small_graph(graph_description, create_using) def make_small_graph(graph_description, create_using=None): """ Return the small graph described by graph_description. graph_description is a list of the form [ltype,name,n,xlist] Here ltype is one of "adjacencylist" or "edgelist", name is the name of the graph and n the number of nodes. This constructs a graph of n nodes with integer labels 0,..,n-1. If ltype="adjacencylist" then xlist is an adjacency list with exactly n entries, in with the j'th entry (which can be empty) specifies the nodes connected to vertex j. e.g. the "square" graph C_4 can be obtained by >>> G=nx.make_small_graph(["adjacencylist","C_4",4,[[2,4],[1,3],[2,4],[1,3]]]) or, since we do not need to add edges twice, >>> G=nx.make_small_graph(["adjacencylist","C_4",4,[[2,4],[3],[4],[]]]) If ltype="edgelist" then xlist is an edge list written as [[v1,w2],[v2,w2],...,[vk,wk]], where vj and wj integers in the range 1,..,n e.g. the "square" graph C_4 can be obtained by >>> G=nx.make_small_graph(["edgelist","C_4",4,[[1,2],[3,4],[2,3],[4,1]]]) Use the create_using argument to choose the graph class/type. """ ltype=graph_description[0] name=graph_description[1] n=graph_description[2] G=empty_graph(n, create_using) nodes=G.nodes() if ltype=="adjacencylist": adjlist=graph_description[3] if len(adjlist) != n: raise NetworkXError("invalid graph_description") G.add_edges_from([(u-1,v) for v in nodes for u in adjlist[v]]) elif ltype=="edgelist": edgelist=graph_description[3] for e in edgelist: v1=e[0]-1 v2=e[1]-1 if v1<0 or v1>n-1 or v2<0 or v2>n-1: raise NetworkXError("invalid graph_description") else: G.add_edge(v1,v2) G.name=name return G def LCF_graph(n,shift_list,repeats,create_using=None): """ Return the cubic graph specified in LCF notation. LCF notation (LCF=Lederberg-Coxeter-Fruchte) is a compressed notation used in the generation of various cubic Hamiltonian graphs of high symmetry. See, for example, dodecahedral_graph, desargues_graph, heawood_graph and pappus_graph below. n (number of nodes) The starting graph is the n-cycle with nodes 0,...,n-1. (The null graph is returned if n < 0.) shift_list = [s1,s2,..,sk], a list of integer shifts mod n, repeats integer specifying the number of times that shifts in shift_list are successively applied to each v_current in the n-cycle to generate an edge between v_current and v_current+shift mod n. 
For v1 cycling through the n-cycle a total of k*repeats with shift cycling through shiftlist repeats times connect v1 with v1+shift mod n The utility graph K_{3,3} >>> G=nx.LCF_graph(6,[3,-3],3) The Heawood graph >>> G=nx.LCF_graph(14,[5,-5],7) See http://mathworld.wolfram.com/LCFNotation.html for a description and references. """ if create_using is not None and create_using.is_directed(): raise NetworkXError("Directed Graph not supported") if n <= 0: return empty_graph(0, create_using) # start with the n-cycle G=cycle_graph(n, create_using) G.name="LCF_graph" nodes=G.nodes() n_extra_edges=repeats*len(shift_list) # edges are added n_extra_edges times # (not all of these need be new) if n_extra_edges < 1: return G for i in range(n_extra_edges): shift=shift_list[i%len(shift_list)] #cycle through shift_list v1=nodes[i%n] # cycle repeatedly through nodes v2=nodes[(i + shift)%n] G.add_edge(v1, v2) return G #------------------------------------------------------------------------------- # Various small and named graphs #------------------------------------------------------------------------------- def bull_graph(create_using=None): """Return the Bull graph. """ description=[ "adjacencylist", "Bull Graph", 5, [[2,3],[1,3,4],[1,2,5],[2],[3]] ] G=make_small_undirected_graph(description, create_using) return G def chvatal_graph(create_using=None): """Return the Chvátal graph.""" description=[ "adjacencylist", "Chvatal Graph", 12, [[2,5,7,10],[3,6,8],[4,7,9],[5,8,10], [6,9],[11,12],[11,12],[9,12], [11],[11,12],[],[]] ] G=make_small_undirected_graph(description, create_using) return G def cubical_graph(create_using=None): """Return the 3-regular Platonic Cubical graph.""" description=[ "adjacencylist", "Platonic Cubical Graph", 8, [[2,4,5],[1,3,8],[2,4,7],[1,3,6], [1,6,8],[4,5,7],[3,6,8],[2,5,7]] ] G=make_small_undirected_graph(description, create_using) return G def desargues_graph(create_using=None): """ Return the Desargues graph.""" G=LCF_graph(20, [5,-5,9,-9], 5, create_using) G.name="Desargues Graph" return G def diamond_graph(create_using=None): """Return the Diamond graph. """ description=[ "adjacencylist", "Diamond Graph", 4, [[2,3],[1,3,4],[1,2,4],[2,3]] ] G=make_small_undirected_graph(description, create_using) return G def dodecahedral_graph(create_using=None): """ Return the Platonic Dodecahedral graph. """ G=LCF_graph(20, [10,7,4,-4,-7,10,-4,7,-7,4], 2, create_using) G.name="Dodecahedral Graph" return G def frucht_graph(create_using=None): """Return the Frucht Graph. The Frucht Graph is the smallest cubical graph whose automorphism group consists only of the identity element. """ G=cycle_graph(7, create_using) G.add_edges_from([[0,7],[1,7],[2,8],[3,9],[4,9],[5,10],[6,10], [7,11],[8,11],[8,9],[10,11]]) G.name="Frucht Graph" return G def heawood_graph(create_using=None): """ Return the Heawood graph, a (3,6) cage. 
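# Illustrative usage sketch (not part of the module above): the two description
# styles accepted by make_small_graph, and an LCF construction. The checks state
# standard properties of C_4 and of the Heawood graph.
import networkx as nx

# C_4 from a 1-indexed edge list ...
C4 = nx.make_small_graph(["edgelist", "C_4", 4, [[1, 2], [2, 3], [3, 4], [4, 1]]])
# ... and the same cycle from a (partial) adjacency list.
C4b = nx.make_small_graph(["adjacencylist", "C_4", 4, [[2, 4], [3], [4], []]])
assert sorted(C4.degree().values()) == sorted(C4b.degree().values()) == [2, 2, 2, 2]

# Heawood graph via LCF notation: a 14-cycle plus chords at shifts +5/-5.
H = nx.LCF_graph(14, [5, -5], 7)
assert H.number_of_nodes() == 14 and H.number_of_edges() == 21
assert set(H.degree().values()) == set([3])      # cubic, as expected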
""" G=LCF_graph(14, [5,-5], 7, create_using) G.name="Heawood Graph" return G def house_graph(create_using=None): """Return the House graph (square with triangle on top).""" description=[ "adjacencylist", "House Graph", 5, [[2,3],[1,4],[1,4,5],[2,3,5],[3,4]] ] G=make_small_undirected_graph(description, create_using) return G def house_x_graph(create_using=None): """Return the House graph with a cross inside the house square.""" description=[ "adjacencylist", "House-with-X-inside Graph", 5, [[2,3,4],[1,3,4],[1,2,4,5],[1,2,3,5],[3,4]] ] G=make_small_undirected_graph(description, create_using) return G def icosahedral_graph(create_using=None): """Return the Platonic Icosahedral graph.""" description=[ "adjacencylist", "Platonic Icosahedral Graph", 12, [[2,6,8,9,12],[3,6,7,9],[4,7,9,10],[5,7,10,11], [6,7,11,12],[7,12],[],[9,10,11,12], [10],[11],[12],[]] ] G=make_small_undirected_graph(description, create_using) return G def krackhardt_kite_graph(create_using=None): """ Return the Krackhardt Kite Social Network. A 10 actor social network introduced by David Krackhardt to illustrate: degree, betweenness, centrality, closeness, etc. The traditional labeling is: Andre=1, Beverley=2, Carol=3, Diane=4, Ed=5, Fernando=6, Garth=7, Heather=8, Ike=9, Jane=10. """ description=[ "adjacencylist", "Krackhardt Kite Social Network", 10, [[2,3,4,6],[1,4,5,7],[1,4,6],[1,2,3,5,6,7],[2,4,7], [1,3,4,7,8],[2,4,5,6,8],[6,7,9],[8,10],[9]] ] G=make_small_undirected_graph(description, create_using) return G def moebius_kantor_graph(create_using=None): """Return the Moebius-Kantor graph.""" G=LCF_graph(16, [5,-5], 8, create_using) G.name="Moebius-Kantor Graph" return G def octahedral_graph(create_using=None): """Return the Platonic Octahedral graph.""" description=[ "adjacencylist", "Platonic Octahedral Graph", 6, [[2,3,4,5],[3,4,6],[5,6],[5,6],[6],[]] ] G=make_small_undirected_graph(description, create_using) return G def pappus_graph(): """ Return the Pappus graph.""" G=LCF_graph(18,[5,7,-7,7,-7,-5],3) G.name="Pappus Graph" return G def petersen_graph(create_using=None): """Return the Petersen graph.""" description=[ "adjacencylist", "Petersen Graph", 10, [[2,5,6],[1,3,7],[2,4,8],[3,5,9],[4,1,10],[1,8,9],[2,9,10], [3,6,10],[4,6,7],[5,7,8]] ] G=make_small_undirected_graph(description, create_using) return G def sedgewick_maze_graph(create_using=None): """ Return a small maze with a cycle. This is the maze used in Sedgewick,3rd Edition, Part 5, Graph Algorithms, Chapter 18, e.g. Figure 18.2 and following. 
Nodes are numbered 0,..,7 """ G=empty_graph(0, create_using) G.add_nodes_from(range(8)) G.add_edges_from([[0,2],[0,7],[0,5]]) G.add_edges_from([[1,7],[2,6]]) G.add_edges_from([[3,4],[3,5]]) G.add_edges_from([[4,5],[4,7],[4,6]]) G.name="Sedgewick Maze" return G def tetrahedral_graph(create_using=None): """ Return the 3-regular Platonic Tetrahedral graph.""" G=complete_graph(4, create_using) G.name="Platonic Tetrahedral graph" return G def truncated_cube_graph(create_using=None): """Return the skeleton of the truncated cube.""" description=[ "adjacencylist", "Truncated Cube Graph", 24, [[2,3,5],[12,15],[4,5],[7,9], [6],[17,19],[8,9],[11,13], [10],[18,21],[12,13],[15], [14],[22,23],[16],[20,24], [18,19],[21],[20],[24], [22],[23],[24],[]] ] G=make_small_undirected_graph(description, create_using) return G def truncated_tetrahedron_graph(create_using=None): """Return the skeleton of the truncated Platonic tetrahedron.""" G=path_graph(12, create_using) # G.add_edges_from([(1,3),(1,10),(2,7),(4,12),(5,12),(6,8),(9,11)]) G.add_edges_from([(0,2),(0,9),(1,6),(3,11),(4,11),(5,7),(8,10)]) G.name="Truncated Tetrahedron Graph" return G def tutte_graph(create_using=None): """Return the Tutte graph.""" description=[ "adjacencylist", "Tutte's Graph", 46, [[2,3,4],[5,27],[11,12],[19,20],[6,34], [7,30],[8,28],[9,15],[10,39],[11,38], [40],[13,40],[14,36],[15,16],[35], [17,23],[18,45],[19,44],[46],[21,46], [22,42],[23,24],[41],[25,28],[26,33], [27,32],[34],[29],[30,33],[31], [32,34],[33],[],[],[36,39], [37],[38,40],[39],[],[], [42,45],[43],[44,46],[45],[],[]] ] G=make_small_undirected_graph(description, create_using) return G networkx-1.8.1/networkx/generators/random_graphs.py0000664000175000017500000006551112177456333022535 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Generators for random graphs. """ # Copyright (C) 2004-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult (dschult@colgate.edu)']) import itertools import random import math import networkx as nx from networkx.generators.classic import empty_graph, path_graph, complete_graph from collections import defaultdict __all__ = ['fast_gnp_random_graph', 'gnp_random_graph', 'dense_gnm_random_graph', 'gnm_random_graph', 'erdos_renyi_graph', 'binomial_graph', 'newman_watts_strogatz_graph', 'watts_strogatz_graph', 'connected_watts_strogatz_graph', 'random_regular_graph', 'barabasi_albert_graph', 'powerlaw_cluster_graph', 'random_lobster', 'random_shell_graph', 'random_powerlaw_tree', 'random_powerlaw_tree_sequence'] #------------------------------------------------------------------------- # Some Famous Random Graphs #------------------------------------------------------------------------- def fast_gnp_random_graph(n, p, seed=None, directed=False): """Return a random graph G_{n,p} (ErdÅ‘s-Rényi graph, binomial graph). Parameters ---------- n : int The number of nodes. p : float Probability for edge creation. seed : int, optional Seed for random number generator (default=None). directed : bool, optional (default=False) If True return a directed graph Notes ----- The G_{n,p} graph algorithm chooses each of the [n(n-1)]/2 (undirected) or n(n-1) (directed) possible edges with probability p. This algorithm is O(n+m) where m is the expected number of edges m=p*n*(n-1)/2. It should be faster than gnp_random_graph when p is small and the expected number of edges is small (sparse graph). 
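# Illustrative usage sketch (not part of the module above). With small p the
# expected edge count is p*n*(n-1)/2, and fast_gnp_random_graph reaches it by
# skipping over non-edges rather than testing every one of the n*(n-1)/2 pairs.
# The seed value is arbitrary.
import networkx as nx

n, p = 1000, 0.002                               # expected edges ~ 0.002*1000*999/2 = 999
G = nx.fast_gnp_random_graph(n, p, seed=42)
print(G.number_of_nodes(), G.number_of_edges())  # edge count should land near 999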
See Also -------- gnp_random_graph References ---------- .. [1] Vladimir Batagelj and Ulrik Brandes, "Efficient generation of large random networks", Phys. Rev. E, 71, 036113, 2005. """ G = empty_graph(n) G.name="fast_gnp_random_graph(%s,%s)"%(n,p) if not seed is None: random.seed(seed) if p <= 0 or p >= 1: return nx.gnp_random_graph(n,p,directed=directed) v = 1 # Nodes in graph are from 0,n-1 (this is the second node index). w = -1 lp = math.log(1.0 - p) if directed: G=nx.DiGraph(G) while v < n: lr = math.log(1.0 - random.random()) w = w + 1 + int(lr/lp) if v == w: # avoid self loops w = w + 1 while w >= n and v < n: w = w - n v = v + 1 if v == w: # avoid self loops w = w + 1 if v < n: G.add_edge(v, w) else: while v < n: lr = math.log(1.0 - random.random()) w = w + 1 + int(lr/lp) while w >= v and v < n: w = w - v v = v + 1 if v < n: G.add_edge(v, w) return G def gnp_random_graph(n, p, seed=None, directed=False): """Return a random graph G_{n,p} (ErdÅ‘s-Rényi graph, binomial graph). Chooses each of the possible edges with probability p. This is also called binomial_graph and erdos_renyi_graph. Parameters ---------- n : int The number of nodes. p : float Probability for edge creation. seed : int, optional Seed for random number generator (default=None). directed : bool, optional (default=False) If True return a directed graph See Also -------- fast_gnp_random_graph Notes ----- This is an O(n^2) algorithm. For sparse graphs (small p) see fast_gnp_random_graph for a faster algorithm. References ---------- .. [1] P. ErdÅ‘s and A. Rényi, On Random Graphs, Publ. Math. 6, 290 (1959). .. [2] E. N. Gilbert, Random Graphs, Ann. Math. Stat., 30, 1141 (1959). """ if directed: G=nx.DiGraph() else: G=nx.Graph() G.add_nodes_from(range(n)) G.name="gnp_random_graph(%s,%s)"%(n,p) if p<=0: return G if p>=1: return complete_graph(n,create_using=G) if not seed is None: random.seed(seed) if G.is_directed(): edges=itertools.permutations(range(n),2) else: edges=itertools.combinations(range(n),2) for e in edges: if random.random() < p: G.add_edge(*e) return G # add some aliases to common names binomial_graph=gnp_random_graph erdos_renyi_graph=gnp_random_graph def dense_gnm_random_graph(n, m, seed=None): """Return the random graph G_{n,m}. Gives a graph picked randomly out of the set of all graphs with n nodes and m edges. This algorithm should be faster than gnm_random_graph for dense graphs. Parameters ---------- n : int The number of nodes. m : int The number of edges. seed : int, optional Seed for random number generator (default=None). See Also -------- gnm_random_graph() Notes ----- Algorithm by Keith M. Briggs Mar 31, 2006. Inspired by Knuth's Algorithm S (Selection sampling technique), in section 3.4.2 of [1]_. References ---------- .. [1] Donald E. Knuth, The Art of Computer Programming, Volume 2/Seminumerical algorithms, Third Edition, Addison-Wesley, 1997. """ mmax=n*(n-1)/2 if m>=mmax: G=complete_graph(n) else: G=empty_graph(n) G.name="dense_gnm_random_graph(%s,%s)"%(n,m) if n==1 or m>=mmax: return G if seed is not None: random.seed(seed) u=0 v=1 t=0 k=0 while True: if random.randrange(mmax-t)=max_edges: return complete_graph(n,create_using=G) nlist=G.nodes() edge_count=0 while edge_count < m: # generate random edge,u,v u = random.choice(nlist) v = random.choice(nlist) if u==v or G.has_edge(u,v): continue else: G.add_edge(u,v) edge_count=edge_count+1 return G def newman_watts_strogatz_graph(n, k, p, seed=None): """Return a Newman-Watts-Strogatz small world graph. 
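# Illustrative usage sketch (not part of the module above): the G_{n,m} generators
# return exactly m edges (for m below the maximum of n*(n-1)/2), in contrast with
# the probabilistic G_{n,p} models. Seed values are arbitrary.
import networkx as nx

G1 = nx.gnm_random_graph(10, 20, seed=1)
G2 = nx.dense_gnm_random_graph(10, 20, seed=1)
assert G1.number_of_edges() == G2.number_of_edges() == 20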
Parameters ---------- n : int The number of nodes k : int Each node is connected to k nearest neighbors in ring topology p : float The probability of adding a new edge for each edge seed : int, optional seed for random number generator (default=None) Notes ----- First create a ring over n nodes. Then each node in the ring is connected with its k nearest neighbors (k-1 neighbors if k is odd). Then shortcuts are created by adding new edges as follows: for each edge u-v in the underlying "n-ring with k nearest neighbors" with probability p add a new edge u-w with randomly-chosen existing node w. In contrast with watts_strogatz_graph(), no edges are removed. See Also -------- watts_strogatz_graph() References ---------- .. [1] M. E. J. Newman and D. J. Watts, Renormalization group analysis of the small-world network model, Physics Letters A, 263, 341, 1999. http://dx.doi.org/10.1016/S0375-9601(99)00757-4 """ if seed is not None: random.seed(seed) if k>=n: raise nx.NetworkXError("k>=n, choose smaller k or larger n") G=empty_graph(n) G.name="newman_watts_strogatz_graph(%s,%s,%s)"%(n,k,p) nlist = G.nodes() fromv = nlist # connect the k/2 neighbors for j in range(1, k // 2+1): tov = fromv[j:] + fromv[0:j] # the first j are now last for i in range(len(fromv)): G.add_edge(fromv[i], tov[i]) # for each edge u-v, with probability p, randomly select existing # node w and add new edge u-w e = G.edges() for (u, v) in e: if random.random() < p: w = random.choice(nlist) # no self-loops and reject if edge u-w exists # is that the correct NWS model? while w == u or G.has_edge(u, w): w = random.choice(nlist) if G.degree(u) >= n-1: break # skip this rewiring else: G.add_edge(u,w) return G def watts_strogatz_graph(n, k, p, seed=None): """Return a Watts-Strogatz small-world graph. Parameters ---------- n : int The number of nodes k : int Each node is connected to k nearest neighbors in ring topology p : float The probability of rewiring each edge seed : int, optional Seed for random number generator (default=None) See Also -------- newman_watts_strogatz_graph() connected_watts_strogatz_graph() Notes ----- First create a ring over n nodes. Then each node in the ring is connected with its k nearest neighbors (k-1 neighbors if k is odd). Then shortcuts are created by replacing some edges as follows: for each edge u-v in the underlying "n-ring with k nearest neighbors" with probability p replace it with a new edge u-w with uniformly random choice of existing node w. In contrast with newman_watts_strogatz_graph(), the random rewiring does not increase the number of edges. The rewired graph is not guaranteed to be connected as in connected_watts_strogatz_graph(). References ---------- .. [1] Duncan J. Watts and Steven H. Strogatz, Collective dynamics of small-world networks, Nature, 393, pp. 440--442, 1998. 
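# Illustrative contrast (not part of the module above): the Newman-Watts-Strogatz
# variant only adds shortcuts, while Watts-Strogatz rewires existing edges, so for
# even k the WS edge count stays at n*k/2 while the NWS count can only grow.
import networkx as nx

n, k, p = 100, 4, 0.1
WS = nx.watts_strogatz_graph(n, k, p, seed=1)
NWS = nx.newman_watts_strogatz_graph(n, k, p, seed=1)
assert WS.number_of_edges() == n * k // 2        # exactly 200
assert NWS.number_of_edges() >= n * k // 2       # 200 plus any added shortcuts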
""" if k>=n: raise nx.NetworkXError("k>=n, choose smaller k or larger n") if seed is not None: random.seed(seed) G = nx.Graph() G.name="watts_strogatz_graph(%s,%s,%s)"%(n,k,p) nodes = list(range(n)) # nodes are labeled 0 to n-1 # connect each node to k/2 neighbors for j in range(1, k // 2+1): targets = nodes[j:] + nodes[0:j] # first j nodes are now last in list G.add_edges_from(zip(nodes,targets)) # rewire edges from each node # loop over all nodes in order (label) and neighbors in order (distance) # no self loops or multiple edges allowed for j in range(1, k // 2+1): # outer loop is neighbors targets = nodes[j:] + nodes[0:j] # first j nodes are now last in list # inner loop in node order for u,v in zip(nodes,targets): if random.random() < p: w = random.choice(nodes) # Enforce no self-loops or multiple edges while w == u or G.has_edge(u, w): w = random.choice(nodes) if G.degree(u) >= n-1: break # skip this rewiring else: G.remove_edge(u,v) G.add_edge(u,w) return G def connected_watts_strogatz_graph(n, k, p, tries=100, seed=None): """Return a connected Watts-Strogatz small-world graph. Attempt to generate a connected realization by repeated generation of Watts-Strogatz small-world graphs. An exception is raised if the maximum number of tries is exceeded. Parameters ---------- n : int The number of nodes k : int Each node is connected to k nearest neighbors in ring topology p : float The probability of rewiring each edge tries : int Number of attempts to generate a connected graph. seed : int, optional The seed for random number generator. See Also -------- newman_watts_strogatz_graph() watts_strogatz_graph() """ G = watts_strogatz_graph(n, k, p, seed) t=1 while not nx.is_connected(G): G = watts_strogatz_graph(n, k, p, seed) t=t+1 if t>tries: raise nx.NetworkXError("Maximum number of tries exceeded") return G def random_regular_graph(d, n, seed=None): """Return a random regular graph of n nodes each with degree d. The resulting graph G has no self-loops or parallel edges. Parameters ---------- d : int Degree n : integer Number of nodes. The value of n*d must be even. seed : hashable object The seed for random number generator. Notes ----- The nodes are numbered form 0 to n-1. Kim and Vu's paper [2]_ shows that this algorithm samples in an asymptotically uniform way from the space of random graphs when d = O(n**(1/3-epsilon)). References ---------- .. [1] A. Steger and N. Wormald, Generating random regular graphs quickly, Probability and Computing 8 (1999), 377-396, 1999. http://citeseer.ist.psu.edu/steger99generating.html .. [2] Jeong Han Kim and Van H. Vu, Generating random regular graphs, Proceedings of the thirty-fifth ACM symposium on Theory of computing, San Diego, CA, USA, pp 213--222, 2003. http://portal.acm.org/citation.cfm?id=780542.780576 """ if (n * d) % 2 != 0: raise nx.NetworkXError("n * d must be even") if not 0 <= d < n: raise nx.NetworkXError("the 0 <= d < n inequality must be satisfied") if seed is not None: random.seed(seed) def _suitable(edges, potential_edges): # Helper subroutine to check if there are suitable edges remaining # If False, the generation of the graph has failed if not potential_edges: return True for s1 in potential_edges: for s2 in potential_edges: # Two iterators on the same dictionary are guaranteed # to visit it in the same order if there are no # intervening modifications. 
if s1 == s2: # Only need to consider s1-s2 pair one time break if s1 > s2: s1, s2 = s2, s1 if (s1, s2) not in edges: return True return False def _try_creation(): # Attempt to create an edge set edges = set() stubs = list(range(n)) * d while stubs: potential_edges = defaultdict(lambda: 0) random.shuffle(stubs) stubiter = iter(stubs) for s1, s2 in zip(stubiter, stubiter): if s1 > s2: s1, s2 = s2, s1 if s1 != s2 and ((s1, s2) not in edges): edges.add((s1, s2)) else: potential_edges[s1] += 1 potential_edges[s2] += 1 if not _suitable(edges, potential_edges): return None # failed to find suitable edge set stubs = [node for node, potential in potential_edges.items() for _ in range(potential)] return edges # Even though a suitable edge set exists, # the generation of such a set is not guaranteed. # Try repeatedly to find one. edges = _try_creation() while edges is None: edges = _try_creation() G = nx.Graph() G.name = "random_regular_graph(%s, %s)" % (d, n) G.add_edges_from(edges) return G def _random_subset(seq,m): """ Return m unique elements from seq. This differs from random.sample which can return repeated elements if seq holds repeated elements. """ targets=set() while len(targets)=n: raise nx.NetworkXError(\ "Barabási-Albert network must have m>=1 and m1 and m 1 or p < 0: raise nx.NetworkXError(\ "NetworkXError p must be in [0,1], p=%f"%(p)) if seed is not None: random.seed(seed) G=empty_graph(m) # add m initial nodes (m0 in barabasi-speak) G.name="Powerlaw-Cluster Graph" repeated_nodes=G.nodes() # list of existing nodes to sample from # with nodes repeated once for each adjacent edge source=m # next node is m while source>> constructor=[(10,20,0.8),(20,40,0.8)] >>> G=nx.random_shell_graph(constructor) """ G=empty_graph(0) G.name="random_shell_graph(constructor)" if seed is not None: random.seed(seed) glist=[] intra_edges=[] nnodes=0 # create gnm graphs for each shell for (n,m,d) in constructor: inter_edges=int(m*d) intra_edges.append(m-inter_edges) g=nx.convert_node_labels_to_integers( gnm_random_graph(n,inter_edges), first_label=nnodes) glist.append(g) nnodes+=n G=nx.operators.union(G,g) # connect the shells randomly for gi in range(len(glist)-1): nlist1=glist[gi].nodes() nlist2=glist[gi+1].nodes() total_edges=intra_edges[gi] edge_count=0 while edge_count < total_edges: u = random.choice(nlist1) v = random.choice(nlist2) if u==v or G.has_edge(u,v): continue else: G.add_edge(u,v) edge_count=edge_count+1 return G def random_powerlaw_tree(n, gamma=3, seed=None, tries=100): """Return a tree with a powerlaw degree distribution. Parameters ---------- n : int, The number of nodes gamma : float Exponent of the power-law seed : int, optional Seed for random number generator (default=None). tries : int Number of attempts to adjust sequence to make a tree Notes ----- A trial powerlaw degree sequence is chosen and then elements are swapped with new elements from a powerlaw distribution until the sequence makes a tree (#edges=#nodes-1). """ from networkx.generators.degree_seq import degree_sequence_tree try: s=random_powerlaw_tree_sequence(n, gamma=gamma, seed=seed, tries=tries) except: raise nx.NetworkXError(\ "Exceeded max (%d) attempts for a valid tree sequence."%tries) G=degree_sequence_tree(s) G.name="random_powerlaw_tree(%s,%s)"%(n,gamma) return G def random_powerlaw_tree_sequence(n, gamma=3, seed=None, tries=100): """ Return a degree sequence for a tree with a powerlaw distribution. 
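# Illustrative usage sketch (not part of the module above): the pairing scheme in
# random_regular_graph yields a simple d-regular graph (no self loops or parallel
# edges), so every node ends up with degree exactly d. n*d must be even.
import networkx as nx

G = nx.random_regular_graph(3, 10, seed=42)
assert all(deg == 3 for deg in G.degree().values())
assert G.number_of_edges() == 3 * 10 // 2        # d*n/2 = 15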
Parameters ---------- n : int, The number of nodes gamma : float Exponent of the power-law seed : int, optional Seed for random number generator (default=None). tries : int Number of attempts to adjust sequence to make a tree Notes ----- A trial powerlaw degree sequence is chosen and then elements are swapped with new elements from a powerlaw distribution until the sequence makes a tree (#edges=#nodes-1). """ if seed is not None: random.seed(seed) # get trial sequence z=nx.utils.powerlaw_sequence(n,exponent=gamma) # round to integer values in the range [0,n] zseq=[min(n, max( int(round(s)),0 )) for s in z] # another sequence to swap values from z=nx.utils.powerlaw_sequence(tries,exponent=gamma) # round to integer values in the range [0,n] swap=[min(n, max( int(round(s)),0 )) for s in z] for deg in swap: if n-sum(zseq)/2.0 == 1.0: # is a tree, return sequence return zseq index=random.randint(0,n-1) zseq[index]=swap.pop() raise nx.NetworkXError(\ "Exceeded max (%d) attempts for a valid tree sequence."%tries) return False networkx-1.8.1/networkx/generators/bipartite.py0000664000175000017500000004064512177456333021675 0ustar aricaric00000000000000# -*- coding: utf-8 -*- """ Generators and functions for bipartite graphs. """ # Copyright (C) 2006-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import math import random import networkx from functools import reduce import networkx as nx __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) __all__=['bipartite_configuration_model', 'bipartite_havel_hakimi_graph', 'bipartite_reverse_havel_hakimi_graph', 'bipartite_alternating_havel_hakimi_graph', 'bipartite_preferential_attachment_graph', 'bipartite_random_graph', 'bipartite_gnmk_random_graph', ] def bipartite_configuration_model(aseq, bseq, create_using=None, seed=None): """Return a random bipartite graph from two given degree sequences. Parameters ---------- aseq : list or iterator Degree sequence for node set A. bseq : list or iterator Degree sequence for node set B. create_using : NetworkX graph instance, optional Return graph of this type. seed : integer, optional Seed for random number generator. Nodes from the set A are connected to nodes in the set B by choosing randomly from the possible free stubs, one in A and one in B. Notes ----- The sum of the two sequences must be equal: sum(aseq)=sum(bseq) If no graph type is specified use MultiGraph with parallel edges. If you want a graph with no parallel edges use create_using=Graph() but then the resulting degree sequences might not be exact. The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. 
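# Illustrative usage sketch (not part of the module above). The two degree
# sequences must have equal sums; the default MultiGraph realizes them exactly,
# and every node carries the 'bipartite' attribute (0 or 1).
from networkx.generators.bipartite import bipartite_configuration_model
import networkx as nx

aseq, bseq = [2, 2, 2], [3, 3]                   # sum(aseq) == sum(bseq) == 6
G = bipartite_configuration_model(aseq, bseq, seed=1)
assert sorted(G.degree().values()) == [2, 2, 2, 3, 3]
sets = nx.get_node_attributes(G, 'bipartite')
assert sorted(sets.values()) == [0, 0, 0, 1, 1]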
""" if create_using is None: create_using=networkx.MultiGraph() elif create_using.is_directed(): raise networkx.NetworkXError(\ "Directed Graph not supported") G=networkx.empty_graph(0,create_using) if not seed is None: random.seed(seed) # length and sum of each sequence lena=len(aseq) lenb=len(bseq) suma=sum(aseq) sumb=sum(bseq) if not suma==sumb: raise networkx.NetworkXError(\ 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s'\ %(suma,sumb)) G=_add_nodes_with_bipartite_label(G,lena,lenb) if max(aseq)==0: return G # done if no edges # build lists of degree-repeated vertex numbers stubs=[] stubs.extend([[v]*aseq[v] for v in range(0,lena)]) astubs=[] astubs=[x for subseq in stubs for x in subseq] stubs=[] stubs.extend([[v]*bseq[v-lena] for v in range(lena,lena+lenb)]) bstubs=[] bstubs=[x for subseq in stubs for x in subseq] # shuffle lists random.shuffle(astubs) random.shuffle(bstubs) G.add_edges_from([[astubs[i],bstubs[i]] for i in range(suma)]) G.name="bipartite_configuration_model" return G def bipartite_havel_hakimi_graph(aseq, bseq, create_using=None): """Return a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. Nodes from the set A are connected to nodes in the set B by connecting the highest degree nodes in set A to the highest degree nodes in set B until all stubs are connected. Parameters ---------- aseq : list or iterator Degree sequence for node set A. bseq : list or iterator Degree sequence for node set B. create_using : NetworkX graph instance, optional Return graph of this type. Notes ----- The sum of the two sequences must be equal: sum(aseq)=sum(bseq) If no graph type is specified use MultiGraph with parallel edges. If you want a graph with no parallel edges use create_using=Graph() but then the resulting degree sequences might not be exact. The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. """ if create_using is None: create_using=networkx.MultiGraph() elif create_using.is_directed(): raise networkx.NetworkXError(\ "Directed Graph not supported") G=networkx.empty_graph(0,create_using) # length of the each sequence naseq=len(aseq) nbseq=len(bseq) suma=sum(aseq) sumb=sum(bseq) if not suma==sumb: raise networkx.NetworkXError(\ 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s'\ %(suma,sumb)) G=_add_nodes_with_bipartite_label(G,naseq,nbseq) if max(aseq)==0: return G # done if no edges # build list of degree-repeated vertex numbers astubs=[[aseq[v],v] for v in range(0,naseq)] bstubs=[[bseq[v-naseq],v] for v in range(naseq,naseq+nbseq)] astubs.sort() while astubs: (degree,u)=astubs.pop() # take of largest degree node in the a set if degree==0: break # done, all are zero # connect the source to largest degree nodes in the b set bstubs.sort() for target in bstubs[-degree:]: v=target[1] G.add_edge(u,v) target[0] -= 1 # note this updates bstubs too. if target[0]==0: bstubs.remove(target) G.name="bipartite_havel_hakimi_graph" return G def bipartite_reverse_havel_hakimi_graph(aseq, bseq, create_using=None): """Return a bipartite graph from two given degree sequences using a Havel-Hakimi style construction. Nodes from set A are connected to nodes in the set B by connecting the highest degree nodes in set A to the lowest degree nodes in set B until all stubs are connected. Parameters ---------- aseq : list or iterator Degree sequence for node set A. bseq : list or iterator Degree sequence for node set B. 
create_using : NetworkX graph instance, optional Return graph of this type. Notes ----- The sum of the two sequences must be equal: sum(aseq)=sum(bseq) If no graph type is specified use MultiGraph with parallel edges. If you want a graph with no parallel edges use create_using=Graph() but then the resulting degree sequences might not be exact. The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. """ if create_using is None: create_using=networkx.MultiGraph() elif create_using.is_directed(): raise networkx.NetworkXError(\ "Directed Graph not supported") G=networkx.empty_graph(0,create_using) # length of the each sequence lena=len(aseq) lenb=len(bseq) suma=sum(aseq) sumb=sum(bseq) if not suma==sumb: raise networkx.NetworkXError(\ 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s'\ %(suma,sumb)) G=_add_nodes_with_bipartite_label(G,lena,lenb) if max(aseq)==0: return G # done if no edges # build list of degree-repeated vertex numbers astubs=[[aseq[v],v] for v in range(0,lena)] bstubs=[[bseq[v-lena],v] for v in range(lena,lena+lenb)] astubs.sort() bstubs.sort() while astubs: (degree,u)=astubs.pop() # take of largest degree node in the a set if degree==0: break # done, all are zero # connect the source to the smallest degree nodes in the b set for target in bstubs[0:degree]: v=target[1] G.add_edge(u,v) target[0] -= 1 # note this updates bstubs too. if target[0]==0: bstubs.remove(target) G.name="bipartite_reverse_havel_hakimi_graph" return G def bipartite_alternating_havel_hakimi_graph(aseq, bseq,create_using=None): """Return a bipartite graph from two given degree sequences using an alternating Havel-Hakimi style construction. Nodes from the set A are connected to nodes in the set B by connecting the highest degree nodes in set A to alternatively the highest and the lowest degree nodes in set B until all stubs are connected. Parameters ---------- aseq : list or iterator Degree sequence for node set A. bseq : list or iterator Degree sequence for node set B. create_using : NetworkX graph instance, optional Return graph of this type. Notes ----- The sum of the two sequences must be equal: sum(aseq)=sum(bseq) If no graph type is specified use MultiGraph with parallel edges. If you want a graph with no parallel edges use create_using=Graph() but then the resulting degree sequences might not be exact. The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. 
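# Illustrative usage sketch (not part of the module above): the Havel-Hakimi style
# constructions are deterministic and realize the given sequences, so the degrees
# of set A (nodes 0..len(aseq)-1) should match aseq exactly.
from networkx.generators.bipartite import bipartite_havel_hakimi_graph

aseq, bseq = [3, 2, 2, 1], [2, 2, 2, 2]          # equal sums (8)
G = bipartite_havel_hakimi_graph(aseq, bseq)
degA = [G.degree(v) for v in range(len(aseq))]
assert sorted(degA) == sorted(aseq)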
""" if create_using is None: create_using=networkx.MultiGraph() elif create_using.is_directed(): raise networkx.NetworkXError(\ "Directed Graph not supported") G=networkx.empty_graph(0,create_using) # length of the each sequence naseq=len(aseq) nbseq=len(bseq) suma=sum(aseq) sumb=sum(bseq) if not suma==sumb: raise networkx.NetworkXError(\ 'invalid degree sequences, sum(aseq)!=sum(bseq),%s,%s'\ %(suma,sumb)) G=_add_nodes_with_bipartite_label(G,naseq,nbseq) if max(aseq)==0: return G # done if no edges # build list of degree-repeated vertex numbers astubs=[[aseq[v],v] for v in range(0,naseq)] bstubs=[[bseq[v-naseq],v] for v in range(naseq,naseq+nbseq)] while astubs: astubs.sort() (degree,u)=astubs.pop() # take of largest degree node in the a set if degree==0: break # done, all are zero bstubs.sort() small=bstubs[0:degree // 2] # add these low degree targets large=bstubs[(-degree+degree // 2):] # and these high degree targets stubs=[x for z in zip(large,small) for x in z] # combine, sorry if len(stubs) 1: raise networkx.NetworkXError("probability %s > 1"%(p)) G=networkx.empty_graph(0,create_using) if not seed is None: random.seed(seed) naseq=len(aseq) G=_add_nodes_with_bipartite_label(G,naseq,0) vv=[ [v]*aseq[v] for v in range(0,naseq)] while vv: while vv[0]: source=vv[0][0] vv[0].remove(source) if random.random() < p or G.number_of_nodes() == naseq: target=G.number_of_nodes() G.add_node(target,bipartite=1) G.add_edge(source,target) else: bb=[ [b]*G.degree(b) for b in range(naseq,G.number_of_nodes())] # flatten the list of lists into a list. bbstubs=reduce(lambda x,y: x+y, bb) # choose preferentially a bottom node. target=random.choice(bbstubs) G.add_node(target,bipartite=1) G.add_edge(source,target) vv.remove(vv[0]) G.name="bipartite_preferential_attachment_model" return G def bipartite_random_graph(n, m, p, seed=None, directed=False): """Return a bipartite random graph. This is a bipartite version of the binomial (ErdÅ‘s-Rényi) graph. Parameters ---------- n : int The number of nodes in the first bipartite set. m : int The number of nodes in the second bipartite set. p : float Probability for edge creation. seed : int, optional Seed for random number generator (default=None). directed : bool, optional (default=False) If True return a directed graph Notes ----- The bipartite random graph algorithm chooses each of the n*m (undirected) or 2*nm (directed) possible edges with probability p. This algorithm is O(n+m) where m is the expected number of edges. The nodes are assigned the attribute 'bipartite' with the value 0 or 1 to indicate which bipartite set the node belongs to. See Also -------- gnp_random_graph, bipartite_configuration_model References ---------- .. [1] Vladimir Batagelj and Ulrik Brandes, "Efficient generation of large random networks", Phys. Rev. E, 71, 036113, 2005. 
""" G=nx.Graph() G=_add_nodes_with_bipartite_label(G,n,m) if directed: G=nx.DiGraph(G) G.name="fast_gnp_random_graph(%s,%s,%s)"%(n,m,p) if not seed is None: random.seed(seed) if p <= 0: return G if p >= 1: return nx.complete_bipartite_graph(n,m) lp = math.log(1.0 - p) v = 0 w = -1 while v < n: lr = math.log(1.0 - random.random()) w = w + 1 + int(lr/lp) while w >= m and v < n: w = w - m v = v + 1 if v < n: G.add_edge(v, n+w) if directed: # use the same algorithm to # add edges from the "m" to "n" set v = 0 w = -1 while v < n: lr = math.log(1.0 - random.random()) w = w + 1 + int(lr/lp) while w>= m and v < n: w = w - m v = v + 1 if v < n: G.add_edge(n+w, v) return G def bipartite_gnmk_random_graph(n, m, k, seed=None, directed=False): """Return a random bipartite graph G_{n,m,k}. Produces a bipartite graph chosen randomly out of the set of all graphs with n top nodes, m bottom nodes, and k edges. Parameters ---------- n : int The number of nodes in the first bipartite set. m : int The number of nodes in the second bipartite set. k : int The number of edges seed : int, optional Seed for random number generator (default=None). directed : bool, optional (default=False) If True return a directed graph Examples -------- G = nx.bipartite_gnmk_random_graph(10,20,50) See Also -------- gnm_random_graph Notes ----- If k > m * n then a complete bipartite graph is returned. This graph is a bipartite version of the `G_{nm}` random graph model. """ G = networkx.Graph() G=_add_nodes_with_bipartite_label(G,n,m) if directed: G=nx.DiGraph(G) G.name="bipartite_gnm_random_graph(%s,%s,%s)"%(n,m,k) if seed is not None: random.seed(seed) if n == 1 or m == 1: return G max_edges = n*m # max_edges for bipartite networks if k >= max_edges: # Maybe we should raise an exception here return networkx.complete_bipartite_graph(n, m, create_using=G) top = [n for n,d in G.nodes(data=True) if d['bipartite']==0] bottom = list(set(G) - set(top)) edge_count = 0 while edge_count < k: # generate random edge,u,v u = random.choice(top) v = random.choice(bottom) if v in G[u]: continue else: G.add_edge(u,v) edge_count += 1 return G def _add_nodes_with_bipartite_label(G, lena, lenb): G.add_nodes_from(range(0,lena+lenb)) b=dict(zip(range(0,lena),[0]*lena)) b.update(dict(zip(range(lena,lena+lenb),[1]*lenb))) nx.set_node_attributes(G,'bipartite',b) return G networkx-1.8.1/networkx/generators/social.py0000664000175000017500000002620312177456333021156 0ustar aricaric00000000000000""" Famous social networks. """ import networkx as nx __author__ = """\n""".join(['Jordi Torrents ', 'Katy Bold ', 'Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import heapq from itertools import combinations, permutations import math from operator import itemgetter import random import networkx as nx from networkx.utils import random_weighted_sample __author__ = "\n".join(['Aric Hagberg ', 'Pieter Swart ', 'Dan Schult ' 'Joel Miller ', 'Nathan Lemons ' 'Brian Cloteaux ']) __all__ = ['configuration_model', 'directed_configuration_model', 'expected_degree_graph', 'havel_hakimi_graph', 'directed_havel_hakimi_graph', 'degree_sequence_tree', 'random_degree_sequence_graph'] def configuration_model(deg_sequence,create_using=None,seed=None): """Return a random graph with the given degree sequence. The configuration model generates a random pseudograph (graph with parallel edges and self loops) by randomly assigning edges to match the given degree sequence. 
Parameters ---------- deg_sequence : list of integers Each list entry corresponds to the degree of a node. create_using : graph, optional (default MultiGraph) Return graph of this type. The instance will be cleared. seed : hashable object, optional Seed for random number generator. Returns ------- G : MultiGraph A graph with the specified degree sequence. Nodes are labeled starting at 0 with an index corresponding to the position in deg_sequence. Raises ------ NetworkXError If the degree sequence does not have an even sum. See Also -------- is_valid_degree_sequence Notes ----- As described by Newman [1]_. A non-graphical degree sequence (not realizable by some simple graph) is allowed since this function returns graphs with self loops and parallel edges. An exception is raised if the degree sequence does not have an even sum. This configuration model construction process can lead to duplicate edges and loops. You can remove the self-loops and parallel edges (see below) which will likely result in a graph that doesn't have the exact degree sequence specified. This "finite-size effect" decreases as the size of the graph increases. References ---------- .. [1] M.E.J. Newman, "The structure and function of complex networks", SIAM REVIEW 45-2, pp 167-256, 2003. Examples -------- >>> from networkx.utils import powerlaw_sequence >>> z=nx.utils.create_degree_sequence(100,powerlaw_sequence) >>> G=nx.configuration_model(z) To remove parallel edges: >>> G=nx.Graph(G) To remove self loops: >>> G.remove_edges_from(G.selfloop_edges()) """ if not sum(deg_sequence)%2 ==0: raise nx.NetworkXError('Invalid degree sequence') if create_using is None: create_using = nx.MultiGraph() elif create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") if not seed is None: random.seed(seed) # start with empty N-node graph N=len(deg_sequence) # allow multiedges and selfloops G=nx.empty_graph(N,create_using) if N==0 or max(deg_sequence)==0: # done if no edges return G # build stublist, a list of available degree-repeated stubs # e.g. for deg_sequence=[3,2,1,1,1] # initially, stublist=[1,1,1,2,2,3,4,5] # i.e., node 1 has degree=3 and is repeated 3 times, etc. stublist=[] for n in G: for i in range(deg_sequence[n]): stublist.append(n) # shuffle stublist and assign pairs by removing 2 elements at a time random.shuffle(stublist) while stublist: n1 = stublist.pop() n2 = stublist.pop() G.add_edge(n1,n2) G.name="configuration_model %d nodes %d edges"%(G.order(),G.size()) return G def directed_configuration_model(in_degree_sequence, out_degree_sequence, create_using=None,seed=None): """Return a directed_random graph with the given degree sequences. The configuration model generates a random directed pseudograph (graph with parallel edges and self loops) by randomly assigning edges to match the given degree sequences. Parameters ---------- in_degree_sequence : list of integers Each list entry corresponds to the in-degree of a node. out_degree_sequence : list of integers Each list entry corresponds to the out-degree of a node. create_using : graph, optional (default MultiDiGraph) Return graph of this type. The instance will be cleared. seed : hashable object, optional Seed for random number generator. Returns ------- G : MultiDiGraph A graph with the specified degree sequences. Nodes are labeled starting at 0 with an index corresponding to the position in deg_sequence. Raises ------ NetworkXError If the degree sequences do not have the same sum. 
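# Minimal sketch (not the library routine) of the stub-matching step that the
# configuration model performs: repeat each node once per unit of degree, shuffle,
# and pair off consecutive stubs. Self loops and parallel edges can occur, exactly
# as the Notes above describe.
import random

deg_sequence = [3, 2, 1, 1, 1]                   # even sum (8) -> 4 edges
stubs = [node for node, deg in enumerate(deg_sequence) for _ in range(deg)]
random.shuffle(stubs)
edges = list(zip(stubs[0::2], stubs[1::2]))      # 4 (possibly repeated) pairs
assert len(edges) == sum(deg_sequence) // 2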
See Also -------- configuration_model Notes ----- Algorithm as described by Newman [1]_. A non-graphical degree sequence (not realizable by some simple graph) is allowed since this function returns graphs with self loops and parallel edges. An exception is raised if the degree sequences does not have the same sum. This configuration model construction process can lead to duplicate edges and loops. You can remove the self-loops and parallel edges (see below) which will likely result in a graph that doesn't have the exact degree sequence specified. This "finite-size effect" decreases as the size of the graph increases. References ---------- .. [1] Newman, M. E. J. and Strogatz, S. H. and Watts, D. J. Random graphs with arbitrary degree distributions and their applications Phys. Rev. E, 64, 026118 (2001) Examples -------- >>> D=nx.DiGraph([(0,1),(1,2),(2,3)]) # directed path graph >>> din=list(D.in_degree().values()) >>> dout=list(D.out_degree().values()) >>> din.append(1) >>> dout[0]=2 >>> D=nx.directed_configuration_model(din,dout) To remove parallel edges: >>> D=nx.DiGraph(D) To remove self loops: >>> D.remove_edges_from(D.selfloop_edges()) """ if not sum(in_degree_sequence) == sum(out_degree_sequence): raise nx.NetworkXError('Invalid degree sequences. ' 'Sequences must have equal sums.') if create_using is None: create_using = nx.MultiDiGraph() if not seed is None: random.seed(seed) nin=len(in_degree_sequence) nout=len(out_degree_sequence) # pad in- or out-degree sequence with zeros to match lengths if nin>nout: out_degree_sequence.extend((nin-nout)*[0]) else: in_degree_sequence.extend((nout-nin)*[0]) # start with empty N-node graph N=len(in_degree_sequence) # allow multiedges and selfloops G=nx.empty_graph(N,create_using) if N==0 or max(in_degree_sequence)==0: # done if no edges return G # build stublists of available degree-repeated stubs # e.g. for degree_sequence=[3,2,1,1,1] # initially, stublist=[1,1,1,2,2,3,4,5] # i.e., node 1 has degree=3 and is repeated 3 times, etc. in_stublist=[] for n in G: for i in range(in_degree_sequence[n]): in_stublist.append(n) out_stublist=[] for n in G: for i in range(out_degree_sequence[n]): out_stublist.append(n) # shuffle stublists and assign pairs by removing 2 elements at a time random.shuffle(in_stublist) random.shuffle(out_stublist) while in_stublist and out_stublist: source = out_stublist.pop() target = in_stublist.pop() G.add_edge(source,target) G.name="directed configuration_model %d nodes %d edges"%(G.order(),G.size()) return G def expected_degree_graph(w, seed=None, selfloops=True): r"""Return a random graph with given expected degrees. Given a sequence of expected degrees `W=(w_0,w_1,\ldots,w_{n-1}`) of length `n` this algorithm assigns an edge between node `u` and node `v` with probability .. math:: p_{uv} = \frac{w_u w_v}{\sum_k w_k} . Parameters ---------- w : list The list of expected degrees. selfloops: bool (default=True) Set to False to remove the possibility of self-loop edges. seed : hashable object, optional The seed for the random number generator. Returns ------- Graph Examples -------- >>> z=[10 for i in range(100)] >>> G=nx.expected_degree_graph(z) Notes ----- The nodes have integer labels corresponding to index of expected degrees input sequence. The complexity of this algorithm is `\mathcal{O}(n+m)` where `n` is the number of nodes and `m` is the expected number of edges. The model in [1]_ includes the possibility of self-loop edges. Set selfloops=False to produce a graph without self loops. 
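# Illustrative usage sketch (not part of the module above): with every expected
# degree equal to 10, the realized mean degree should come out close to 10
# (self loops are allowed by default and count 2 toward a node's degree).
import networkx as nx

w = [10] * 100
G = nx.expected_degree_graph(w, seed=11)
mean_degree = sum(G.degree().values()) / float(len(G))
print(mean_degree)                               # typically near 10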
For finite graphs this model doesn't produce exactly the given expected degree sequence. Instead the expected degrees are as follows. For the case without self loops (selfloops=False), .. math:: E[deg(u)] = \sum_{v \ne u} p_{uv} = w_u \left( 1 - \frac{w_u}{\sum_k w_k} \right) . NetworkX uses the standard convention that a self-loop edge counts 2 in the degree of a node, so with self loops (selfloops=True), .. math:: E[deg(u)] = \sum_{v \ne u} p_{uv} + 2 p_{uu} = w_u \left( 1 + \frac{w_u}{\sum_k w_k} \right) . References ---------- .. [1] Fan Chung and L. Lu, Connected components in random graphs with given expected degree sequences, Ann. Combinatorics, 6, pp. 125-145, 2002. .. [2] Joel Miller and Aric Hagberg, Efficient generation of networks with given expected degrees, in Algorithms and Models for the Web-Graph (WAW 2011), Alan Frieze, Paul Horn, and PaweÅ‚ PraÅ‚at (Eds), LNCS 6732, pp. 115-126, 2011. """ n = len(w) G=nx.empty_graph(n) if n==0 or max(w)==0: # done if no edges return G if seed is not None: random.seed(seed) rho = 1/float(sum(w)) # sort weights, largest first # preserve order of weights for integer node label mapping order = sorted(enumerate(w),key=itemgetter(1),reverse=True) mapping = dict((c,uv[0]) for c,uv in enumerate(order)) seq = [v for u,v in order] last=n if not selfloops: last-=1 for u in range(last): v = u if not selfloops: v += 1 factor = seq[u] * rho p = seq[v]*factor if p>1: p = 1 while v0: if p != 1: r = random.random() v += int(math.floor(math.log(r)/math.log(1-p))) if v < n: q = seq[v]*factor if q>1: q = 1 if random.random() < q/p: G.add_edge(mapping[u],mapping[v]) v += 1 p = q return G def havel_hakimi_graph(deg_sequence,create_using=None): """Return a simple graph with given degree sequence constructed using the Havel-Hakimi algorithm. Parameters ---------- deg_sequence: list of integers Each integer corresponds to the degree of a node (need not be sorted). create_using : graph, optional (default Graph) Return graph of this type. The instance will be cleared. Directed graphs are not allowed. Raises ------ NetworkXException For a non-graphical degree sequence (i.e. one not realizable by some simple graph). Notes ----- The Havel-Hakimi algorithm constructs a simple graph by successively connecting the node of highest degree to other nodes of highest degree, resorting remaining nodes by degree, and repeating the process. The resulting graph has a high degree-associativity. Nodes are labeled 1,.., len(deg_sequence), corresponding to their position in deg_sequence. The basic algorithm is from Hakimi [1]_ and was generalized by Kleitman and Wang [2]_. References ---------- .. [1] Hakimi S., On Realizability of a Set of Integers as Degrees of the Vertices of a Linear Graph. I, Journal of SIAM, 10(3), pp. 496-506 (1962) .. [2] Kleitman D.J. and Wang D.L. Algorithms for Constructing Graphs and Digraphs with Given Valences and Factors Discrete Mathematics, 6(1), pp. 
79-88 (1973) """ if not nx.is_valid_degree_sequence(deg_sequence): raise nx.NetworkXError('Invalid degree sequence') if create_using is not None: if create_using.is_directed(): raise nx.NetworkXError("Directed graphs are not supported") p = len(deg_sequence) G=nx.empty_graph(p,create_using) num_degs = [] for i in range(p): num_degs.append([]) dmax, dsum, n = 0, 0, 0 for d in deg_sequence: # Process only the non-zero integers if d>0: num_degs[d].append(n) dmax, dsum, n = max(dmax,d), dsum+d, n+1 # Return graph if no edges if n==0: return G modstubs = [(0,0)]*(dmax+1) # Successively reduce degree sequence by removing the maximum degree while n > 0: # Retrieve the maximum degree in the sequence while len(num_degs[dmax]) == 0: dmax -= 1; # If there are not enough stubs to connect to, then the sequence is # not graphical if dmax > n-1: raise nx.NetworkXError('Non-graphical integer sequence') # Remove largest stub in list source = num_degs[dmax].pop() n -= 1 # Reduce the next dmax largest stubs mslen = 0 k = dmax for i in range(dmax): while len(num_degs[k]) == 0: k -= 1 target = num_degs[k].pop() G.add_edge(source, target) n -= 1 if k > 1: modstubs[mslen] = (k-1,target) mslen += 1 # Add back to the list any nonzero stubs that were removed for i in range(mslen): (stubval, stubtarget) = modstubs[i] num_degs[stubval].append(stubtarget) n += 1 G.name="havel_hakimi_graph %d nodes %d edges"%(G.order(),G.size()) return G def directed_havel_hakimi_graph(in_deg_sequence, out_deg_sequence, create_using=None): """Return a directed graph with the given degree sequences. Parameters ---------- in_deg_sequence : list of integers Each list entry corresponds to the in-degree of a node. out_deg_sequence : list of integers Each list entry corresponds to the out-degree of a node. create_using : graph, optional (default DiGraph) Return graph of this type. The instance will be cleared. Returns ------- G : DiGraph A graph with the specified degree sequences. Nodes are labeled starting at 0 with an index corresponding to the position in deg_sequence Raises ------ NetworkXError If the degree sequences are not digraphical. See Also -------- configuration_model Notes ----- Algorithm as described by Kleitman and Wang [1]_. References ---------- .. [1] D.J. Kleitman and D.L. Wang Algorithms for Constructing Graphs and Digraphs with Given Valences and Factors Discrete Mathematics, 6(1), pp. 79-88 (1973) """ assert(nx.utils.is_list_of_ints(in_deg_sequence)) assert(nx.utils.is_list_of_ints(out_deg_sequence)) if create_using is None: create_using = nx.DiGraph() # Process the sequences and form two heaps to store degree pairs with # either zero or nonzero out degrees sumin, sumout, nin, nout = 0, 0, len(in_deg_sequence), len(out_deg_sequence) maxn = max(nin, nout) G = nx.empty_graph(maxn,create_using) if maxn==0: return G maxin = 0 stubheap, zeroheap = [ ], [ ] for n in range(maxn): in_deg, out_deg = 0, 0 if n 0: stubheap.append((-1*out_deg, -1*in_deg,n)) elif out_deg > 0: zeroheap.append((-1*out_deg,n)) if sumin != sumout: raise nx.NetworkXError( 'Invalid degree sequences. 
Sequences must have equal sums.') heapq.heapify(stubheap) heapq.heapify(zeroheap) modstubs = [(0,0,0)]*(maxin+1) # Successively reduce degree sequence by removing the maximum while stubheap: # Remove first value in the sequence with a non-zero in degree (freeout, freein, target) = heapq.heappop(stubheap) freein *= -1 if freein > len(stubheap)+len(zeroheap): raise nx.NetworkXError('Non-digraphical integer sequence') # Attach arcs from the nodes with the most stubs mslen = 0 for i in range(freein): if zeroheap and (not stubheap or stubheap[0][0] > zeroheap[0][0]): (stubout, stubsource) = heapq.heappop(zeroheap) stubin = 0 else: (stubout, stubin, stubsource) = heapq.heappop(stubheap) if stubout == 0: raise nx.NetworkXError('Non-digraphical integer sequence') G.add_edge(stubsource, target) # Check if source is now totally connected if stubout+1<0 or stubin<0: modstubs[mslen] = (stubout+1, stubin, stubsource) mslen += 1 # Add the nodes back to the heaps that still have available stubs for i in range(mslen): stub = modstubs[i] if stub[1] < 0: heapq.heappush(stubheap, stub) else: heapq.heappush(zeroheap, (stub[0], stub[2])) if freeout<0: heapq.heappush(zeroheap, (freeout, target)) G.name="directed_havel_hakimi_graph %d nodes %d edges"%(G.order(),G.size()) return G def degree_sequence_tree(deg_sequence,create_using=None): """Make a tree for the given degree sequence. A tree has #nodes-#edges=1 so the degree sequence must have len(deg_sequence)-sum(deg_sequence)/2=1 """ if not len(deg_sequence)-sum(deg_sequence)/2.0 == 1.0: raise nx.NetworkXError("Degree sequence invalid") if create_using is not None and create_using.is_directed(): raise nx.NetworkXError("Directed Graph not supported") # single node tree if len(deg_sequence)==1: G=nx.empty_graph(0,create_using) return G # all degrees greater than 1 deg=[s for s in deg_sequence if s>1] deg.sort(reverse=True) # make path graph as backbone n=len(deg)+2 G=nx.path_graph(n,create_using) last=n # add the leaves for source in range(1,n-1): nedges=deg.pop()-2 for target in range(last,last+nedges): G.add_edge(source, target) last+=nedges # in case we added one too many if len(G.degree())>len(deg_sequence): G.remove_node(0) return G def random_degree_sequence_graph(sequence, seed=None, tries=10): r"""Return a simple random graph with the given degree sequence. If the maximum degree `d_m` in the sequence is `O(m^{1/4})` then the algorithm produces almost uniform random graphs in `O(m d_m)` time where `m` is the number of edges. Parameters ---------- sequence : list of integers Sequence of degrees seed : hashable object, optional Seed for random number generator tries : int, optional Maximum number of tries to create a graph Returns ------- G : Graph A graph with the specified degree sequence. Nodes are labeled starting at 0 with an index corresponding to the position in the sequence. Raises ------ NetworkXUnfeasible If the degree sequence is not graphical. NetworkXError If a graph is not produced in specified number of tries See Also -------- is_valid_degree_sequence, configuration_model Notes ----- The generator algorithm [1]_ is not guaranteed to produce a graph. References ---------- .. [1] Moshen Bayati, Jeong Han Kim, and Amin Saberi, A sequential algorithm for generating random graphs. 
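# Illustrative usage sketch (not part of the module above): Havel-Hakimi realizes a
# graphical sequence exactly as a simple graph, and degree_sequence_tree requires
# len(seq) - sum(seq)/2 == 1. The example sequences are arbitrary.
import networkx as nx

seq = [3, 3, 2, 2, 1, 1]                         # graphical, even sum
assert nx.is_valid_degree_sequence(seq)
G = nx.havel_hakimi_graph(seq)
assert sorted(G.degree().values()) == sorted(seq)

tseq = [3, 3, 1, 1, 1, 1]                        # 6 - 10/2 == 1, so a tree exists
T = nx.degree_sequence_tree(tseq)
assert T.number_of_nodes() - T.number_of_edges() == 1
assert sorted(T.degree().values()) == sorted(tseq)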
Algorithmica, Volume 58, Number 4, 860-910, DOI: 10.1007/s00453-009-9340-1 Examples -------- >>> sequence = [1, 2, 2, 3] >>> G = nx.random_degree_sequence_graph(sequence) >>> sorted(G.degree().values()) [1, 2, 2, 3] """ DSRG = DegreeSequenceRandomGraph(sequence, seed=seed) for try_n in range(tries): try: return DSRG.generate() except nx.NetworkXUnfeasible: pass raise nx.NetworkXError('failed to generate graph in %d tries'%tries) class DegreeSequenceRandomGraph(object): # class to generate random graphs with a given degree sequence # use random_degree_sequence_graph() def __init__(self, degree, seed=None): if not nx.is_valid_degree_sequence(degree): raise nx.NetworkXUnfeasible('degree sequence is not graphical') if seed is not None: random.seed(seed) self.degree = list(degree) # node labels are integers 0,...,n-1 self.m = sum(self.degree)/2.0 # number of edges try: self.dmax = max(self.degree) # maximum degree except ValueError: self.dmax = 0 def generate(self): # remaining_degree is mapping from int->remaining degree self.remaining_degree = dict(enumerate(self.degree)) # add all nodes to make sure we get isolated nodes self.graph = nx.Graph() self.graph.add_nodes_from(self.remaining_degree) # remove zero degree nodes for n,d in list(self.remaining_degree.items()): if d == 0: del self.remaining_degree[n] if len(self.remaining_degree) > 0: # build graph in three phases according to how many unmatched edges self.phase1() self.phase2() self.phase3() return self.graph def update_remaining(self, u, v, aux_graph=None): # decrement remaining nodes, modify auxilliary graph if in phase3 if aux_graph is not None: # remove edges from auxilliary graph aux_graph.remove_edge(u,v) if self.remaining_degree[u] == 1: del self.remaining_degree[u] if aux_graph is not None: aux_graph.remove_node(u) else: self.remaining_degree[u] -= 1 if self.remaining_degree[v] == 1: del self.remaining_degree[v] if aux_graph is not None: aux_graph.remove_node(v) else: self.remaining_degree[v] -= 1 def p(self,u,v): # degree probability return 1 - self.degree[u]*self.degree[v]/(4.0*self.m) def q(self,u,v): # remaining degree probability norm = float(max(self.remaining_degree.values()))**2 return self.remaining_degree[u]*self.remaining_degree[v]/norm def suitable_edge(self): # Check if there is a suitable edge that is not in the graph # True if an (arbitrary) remaining node has at least one possible # connection to another remaining node nodes = iter(self.remaining_degree) u = next(nodes) # one arbitrary node for v in nodes: # loop over all other remaining nodes if not self.graph.has_edge(u, v): return True return False def phase1(self): # choose node pairs from (degree) weighted distribution while sum(self.remaining_degree.values()) >= 2 * self.dmax**2: u,v = sorted(random_weighted_sample(self.remaining_degree, 2)) if self.graph.has_edge(u,v): continue if random.random() < self.p(u,v): # accept edge self.graph.add_edge(u,v) self.update_remaining(u,v) def phase2(self): # choose remaining nodes uniformly at random and use rejection sampling while len(self.remaining_degree) >= 2 * self.dmax: norm = float(max(self.remaining_degree.values()))**2 while True: u,v = sorted(random.sample(self.remaining_degree.keys(), 2)) if self.graph.has_edge(u,v): continue if random.random() < self.q(u,v): break if random.random() < self.p(u,v): # accept edge self.graph.add_edge(u,v) self.update_remaining(u,v) def phase3(self): # build potential remaining edges and choose with rejection sampling potential_edges = combinations(self.remaining_degree, 2) # 
build auxilliary graph of potential edges not already in graph H = nx.Graph([(u,v) for (u,v) in potential_edges if not self.graph.has_edge(u,v)]) while self.remaining_degree: if not self.suitable_edge(): raise nx.NetworkXUnfeasible('no suitable edges left') while True: u,v = sorted(random.choice(H.edges())) if random.random() < self.q(u,v): break if random.random() < self.p(u,v): # accept edge self.graph.add_edge(u,v) self.update_remaining(u,v, aux_graph=H) networkx-1.8.1/networkx/generators/directed.py0000664000175000017500000002242212177456333021466 0ustar aricaric00000000000000""" Generators for some directed graphs. gn_graph: growing network gnc_graph: growing network with copying gnr_graph: growing network with redirection scale_free_graph: scale free directed graph """ # Copyright (C) 2006-2009 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. __author__ ="""Aric Hagberg (hagberg@lanl.gov)\nWillem Ligtenberg (W.P.A.Ligtenberg@tue.nl)""" __all__ = ['gn_graph', 'gnc_graph', 'gnr_graph','scale_free_graph'] import random import networkx as nx from networkx.generators.classic import empty_graph from networkx.utils import discrete_sequence def gn_graph(n,kernel=None,create_using=None,seed=None): """Return the GN digraph with n nodes. The GN (growing network) graph is built by adding nodes one at a time with a link to one previously added node. The target node for the link is chosen with probability based on degree. The default attachment kernel is a linear function of degree. The graph is always a (directed) tree. Parameters ---------- n : int The number of nodes for the generated graph. kernel : function The attachment kernel. create_using : graph, optional (default DiGraph) Return graph of this type. The instance will be cleared. seed : hashable object, optional The seed for the random number generator. Examples -------- >>> D=nx.gn_graph(10) # the GN graph >>> G=D.to_undirected() # the undirected version To specify an attachment kernel use the kernel keyword >>> D=nx.gn_graph(10,kernel=lambda x:x**1.5) # A_k=k^1.5 References ---------- .. [1] P. L. Krapivsky and S. Redner, Organization of Growing Random Networks, Phys. Rev. E, 63, 066123, 2001. """ if create_using is None: create_using = nx.DiGraph() elif not create_using.is_directed(): raise nx.NetworkXError("Directed Graph required in create_using") if kernel is None: kernel = lambda x: x if seed is not None: random.seed(seed) G=empty_graph(1,create_using) G.name="gn_graph(%s)"%(n) if n==1: return G G.add_edge(1,0) # get started ds=[1,1] # degree sequence for source in range(2,n): # compute distribution from kernel and degree dist=[kernel(d) for d in ds] # choose target from discrete distribution target=discrete_sequence(1,distribution=dist)[0] G.add_edge(source,target) ds.append(1) # the source has only one link (degree one) ds[target]+=1 # add one to the target link degree return G def gnr_graph(n,p,create_using=None,seed=None): """Return the GNR digraph with n nodes and redirection probability p. The GNR (growing network with redirection) graph is built by adding nodes one at a time with a link to one previously added node. The previous target node is chosen uniformly at random. With probabiliy p the link is instead "redirected" to the successor node of the target. The graph is always a (directed) tree. Parameters ---------- n : int The number of nodes for the generated graph. p : float The redirection probability. create_using : graph, optional (default DiGraph) Return graph of this type. 
        The instance will be cleared.
    seed : hashable object, optional
        The seed for the random number generator.

    Examples
    --------
    >>> D=nx.gnr_graph(10,0.5) # the GNR graph
    >>> G=D.to_undirected() # the undirected version

    References
    ----------
    .. [1] P. L. Krapivsky and S. Redner,
       Organization of Growing Random Networks,
       Phys. Rev. E, 63, 066123, 2001.
    """
    if create_using is None:
        create_using = nx.DiGraph()
    elif not create_using.is_directed():
        raise nx.NetworkXError("Directed Graph required in create_using")

    if seed is not None:
        random.seed(seed)

    G = empty_graph(1, create_using)
    G.name = "gnr_graph(%s,%s)" % (n, p)
    if n == 1:
        return G

    for source in range(1, n):
        target = random.randrange(0, source)
        if random.random() < p and target != 0:
            target = G.successors(target)[0]
        G.add_edge(source, target)

    return G


def gnc_graph(n, create_using=None, seed=None):
    """Return the GNC digraph with n nodes.

    The GNC (growing network with copying) graph is built by adding nodes
    one at a time with links to one previously added node (chosen uniformly
    at random) and to all of that node's successors.

    Parameters
    ----------
    n : int
        The number of nodes for the generated graph.
    create_using : graph, optional (default DiGraph)
        Return graph of this type. The instance will be cleared.
    seed : hashable object, optional
        The seed for the random number generator.

    References
    ----------
    .. [1] P. L. Krapivsky and S. Redner,
       Network Growth by Copying,
       Phys. Rev. E, 71, 036118, 2005.
    """
    if create_using is None:
        create_using = nx.DiGraph()
    elif not create_using.is_directed():
        raise nx.NetworkXError("Directed Graph required in create_using")

    if seed is not None:
        random.seed(seed)

    G = empty_graph(1, create_using)
    G.name = "gnc_graph(%s)" % (n)
    if n == 1:
        return G

    for source in range(1, n):
        target = random.randrange(0, source)
        for succ in G.successors(target):
            G.add_edge(source, succ)
        G.add_edge(source, target)

    return G


def scale_free_graph(n, alpha=0.41, beta=0.54, gamma=0.05, delta_in=0.2,
                     delta_out=0, create_using=None, seed=None):
    """Return a scale-free directed graph.

    Parameters
    ----------
    n : integer
        Number of nodes in graph
    alpha : float
        Probability for adding a new node connected to an existing node
        chosen randomly according to the in-degree distribution.
    beta : float
        Probability for adding an edge between two existing nodes.
        One existing node is chosen randomly according to the in-degree
        distribution and the other chosen randomly according to the
        out-degree distribution.
    gamma : float
        Probability for adding a new node connected to an existing node
        chosen randomly according to the out-degree distribution.
    delta_in : float
        Bias for choosing nodes from in-degree distribution.
    delta_out : float
        Bias for choosing nodes from out-degree distribution.
    create_using : graph, optional (default MultiDiGraph)
        Use this graph instance to start the process (default=3-cycle).
    seed : integer, optional
        Seed for random number generator

    Examples
    --------
    >>> G=nx.scale_free_graph(100)

    Notes
    -----
    The sum of alpha, beta, and gamma must be 1.

    References
    ----------
    .. [1] B. Bollob{\'a}s, C. Borgs, J. Chayes, and O. Riordan,
       Directed scale-free graphs,
       Proceedings of the fourteenth annual ACM-SIAM symposium on
       Discrete algorithms, 132--139, 2003.
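
    A minimal usage sketch (the call below is illustrative and assumes the
    default MultiDiGraph return type)::

        G = nx.scale_free_graph(1000)
        in_degrees = list(G.in_degree().values())   # dict of node -> degree
        print(max(in_degrees))                      # heavy-tailed in-degrees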
""" def _choose_node(G,distribution,delta): cumsum=0.0 # normalization psum=float(sum(distribution.values()))+float(delta)*len(distribution) r=random.random() for i in range(0,len(distribution)): cumsum+=(distribution[i]+delta)/psum if r < cumsum: break return i if create_using is None: # start with 3-cycle G = nx.MultiDiGraph() G.add_edges_from([(0,1),(1,2),(2,0)]) else: # keep existing graph structure? G = create_using if not (G.is_directed() and G.is_multigraph()): raise nx.NetworkXError(\ "MultiDiGraph required in create_using") if alpha <= 0: raise ValueError('alpha must be >= 0.') if beta <= 0: raise ValueError('beta must be >= 0.') if gamma <= 0: raise ValueError('beta must be >= 0.') if alpha+beta+gamma !=1.0: raise ValueError('alpha+beta+gamma must equal 1.') G.name="directed_scale_free_graph(%s,alpha=%s,beta=%s,gamma=%s,delta_in=%s,delta_out=%s)"%(n,alpha,beta,gamma,delta_in,delta_out) # seed random number generated (uses None as default) random.seed(seed) while len(G)>> import numpy >>> a=numpy.reshape(numpy.random.random_integers(0,1,size=100),(10,10)) >>> D=nx.DiGraph(a) or equivalently >>> D=nx.to_networkx_graph(a,create_using=nx.DiGraph()) Create a graph with a single edge from a dictionary of dictionaries >>> d={0: {1: 1}} # dict-of-dicts single edge (0,1) >>> G=nx.Graph(d) See Also -------- nx_pygraphviz, nx_pydot """ __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Pieter Swart (swart@lanl.gov)', 'Dan Schult(dschult@colgate.edu)']) # Copyright (C) 2006-2011 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import warnings import networkx as nx __all__ = ['to_networkx_graph', 'from_dict_of_dicts', 'to_dict_of_dicts', 'from_dict_of_lists', 'to_dict_of_lists', 'from_edgelist', 'to_edgelist', 'from_numpy_matrix', 'to_numpy_matrix', 'to_numpy_recarray', 'from_scipy_sparse_matrix', 'to_scipy_sparse_matrix'] def _prep_create_using(create_using): """Return a graph object ready to be populated. If create_using is None return the default (just networkx.Graph()) If create_using.clear() works, assume it returns a graph object. Otherwise raise an exception because create_using is not a networkx graph. """ if create_using is None: G=nx.Graph() else: G=create_using try: G.clear() except: raise TypeError("Input graph is not a networkx graph type") return G def to_networkx_graph(data,create_using=None,multigraph_input=False): """Make a NetworkX graph from a known data structure. The preferred way to call this is automatically from the class constructor >>> d={0: {1: {'weight':1}}} # dict-of-dicts single edge (0,1) >>> G=nx.Graph(d) instead of the equivalent >>> G=nx.from_dict_of_dicts(d) Parameters ---------- data : a object to be converted Current known types are: any NetworkX graph dict-of-dicts dist-of-lists list of edges numpy matrix numpy ndarray scipy sparse matrix pygraphviz agraph create_using : NetworkX graph Use specified graph for result. Otherwise a new graph is created. multigraph_input : bool (default False) If True and data is a dict_of_dicts, try to create a multigraph assuming dict_of_dict_of_lists. If data and create_using are both multigraphs then create a multigraph from a multigraph. 
""" # NX graph if hasattr(data,"adj"): try: result= from_dict_of_dicts(data.adj,\ create_using=create_using,\ multigraph_input=data.is_multigraph()) if hasattr(data,'graph') and isinstance(data.graph,dict): result.graph=data.graph.copy() if hasattr(data,'node') and isinstance(data.node,dict): result.node=dict( (n,dd.copy()) for n,dd in data.node.items() ) return result except: raise nx.NetworkXError("Input is not a correct NetworkX graph.") # pygraphviz agraph if hasattr(data,"is_strict"): try: return nx.from_agraph(data,create_using=create_using) except: raise nx.NetworkXError("Input is not a correct pygraphviz graph.") # dict of dicts/lists if isinstance(data,dict): try: return from_dict_of_dicts(data,create_using=create_using,\ multigraph_input=multigraph_input) except: try: return from_dict_of_lists(data,create_using=create_using) except: raise TypeError("Input is not known type.") # list or generator of edges if (isinstance(data,list) or hasattr(data,'next') or hasattr(data, '__next__')): try: return from_edgelist(data,create_using=create_using) except: raise nx.NetworkXError("Input is not a valid edge list") # numpy matrix or ndarray try: import numpy if isinstance(data,numpy.matrix) or \ isinstance(data,numpy.ndarray): try: return from_numpy_matrix(data,create_using=create_using) except: raise nx.NetworkXError(\ "Input is not a correct numpy matrix or array.") except ImportError: warnings.warn('numpy not found, skipping conversion test.', ImportWarning) # scipy sparse matrix - any format try: import scipy if hasattr(data,"format"): try: return from_scipy_sparse_matrix(data,create_using=create_using) except: raise nx.NetworkXError(\ "Input is not a correct scipy sparse matrix type.") except ImportError: warnings.warn('scipy not found, skipping conversion test.', ImportWarning) raise nx.NetworkXError(\ "Input is not a known data type for conversion.") return def convert_to_undirected(G): """Return a new undirected representation of the graph G.""" return G.to_undirected() def convert_to_directed(G): """Return a new directed representation of the graph G.""" return G.to_directed() def to_dict_of_lists(G,nodelist=None): """Return adjacency representation of graph as a dictionary of lists. Parameters ---------- G : graph A NetworkX graph nodelist : list Use only nodes specified in nodelist Notes ----- Completely ignores edge data for MultiGraph and MultiDiGraph. """ if nodelist is None: nodelist=G d = {} for n in nodelist: d[n]=[nbr for nbr in G.neighbors(n) if nbr in nodelist] return d def from_dict_of_lists(d,create_using=None): """Return a graph from a dictionary of lists. Parameters ---------- d : dictionary of lists A dictionary of lists adjacency representation. create_using : NetworkX graph Use specified graph for result. Otherwise a new graph is created. Examples -------- >>> dol= {0:[1]} # single edge (0,1) >>> G=nx.from_dict_of_lists(dol) or >>> G=nx.Graph(dol) # use Graph constructor """ G=_prep_create_using(create_using) G.add_nodes_from(d) if G.is_multigraph() and not G.is_directed(): # a dict_of_lists can't show multiedges. BUT for undirected graphs, # each edge shows up twice in the dict_of_lists. # So we need to treat this case separately. 
seen={} for node,nbrlist in d.items(): for nbr in nbrlist: if nbr not in seen: G.add_edge(node,nbr) seen[node]=1 # don't allow reverse edge to show up else: G.add_edges_from( ((node,nbr) for node,nbrlist in d.items() for nbr in nbrlist) ) return G def to_dict_of_dicts(G,nodelist=None,edge_data=None): """Return adjacency representation of graph as a dictionary of dictionaries. Parameters ---------- G : graph A NetworkX graph nodelist : list Use only nodes specified in nodelist edge_data : list, optional If provided, the value of the dictionary will be set to edge_data for all edges. This is useful to make an adjacency matrix type representation with 1 as the edge data. If edgedata is None, the edgedata in G is used to fill the values. If G is a multigraph, the edgedata is a dict for each pair (u,v). """ dod={} if nodelist is None: if edge_data is None: for u,nbrdict in G.adjacency_iter(): dod[u]=nbrdict.copy() else: # edge_data is not None for u,nbrdict in G.adjacency_iter(): dod[u]=dod.fromkeys(nbrdict, edge_data) else: # nodelist is not None if edge_data is None: for u in nodelist: dod[u]={} for v,data in ((v,data) for v,data in G[u].items() if v in nodelist): dod[u][v]=data else: # nodelist and edge_data are not None for u in nodelist: dod[u]={} for v in ( v for v in G[u] if v in nodelist): dod[u][v]=edge_data return dod def from_dict_of_dicts(d,create_using=None,multigraph_input=False): """Return a graph from a dictionary of dictionaries. Parameters ---------- d : dictionary of dictionaries A dictionary of dictionaries adjacency representation. create_using : NetworkX graph Use specified graph for result. Otherwise a new graph is created. multigraph_input : bool (default False) When True, the values of the inner dict are assumed to be containers of edge data for multiple edges. Otherwise this routine assumes the edge data are singletons. Examples -------- >>> dod= {0: {1:{'weight':1}}} # single edge (0,1) >>> G=nx.from_dict_of_dicts(dod) or >>> G=nx.Graph(dod) # use Graph constructor """ G=_prep_create_using(create_using) G.add_nodes_from(d) # is dict a MultiGraph or MultiDiGraph? if multigraph_input: # make a copy of the list of edge data (but not the edge data) if G.is_directed(): if G.is_multigraph(): G.add_edges_from( (u,v,key,data) for u,nbrs in d.items() for v,datadict in nbrs.items() for key,data in datadict.items() ) else: G.add_edges_from( (u,v,data) for u,nbrs in d.items() for v,datadict in nbrs.items() for key,data in datadict.items() ) else: # Undirected if G.is_multigraph(): seen=set() # don't add both directions of undirected graph for u,nbrs in d.items(): for v,datadict in nbrs.items(): if (u,v) not in seen: G.add_edges_from( (u,v,key,data) for key,data in datadict.items() ) seen.add((v,u)) else: seen=set() # don't add both directions of undirected graph for u,nbrs in d.items(): for v,datadict in nbrs.items(): if (u,v) not in seen: G.add_edges_from( (u,v,data) for key,data in datadict.items() ) seen.add((v,u)) else: # not a multigraph to multigraph transfer if G.is_multigraph() and not G.is_directed(): # d can have both representations u-v, v-u in dict. Only add one. 
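            # For example, d = {0: {1: {'weight': 1}}, 1: {0: {'weight': 1}}}
            # describes the same undirected edge twice; only the single
            # edge (0, 1) is added below.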
# We don't need this check for digraphs since we add both directions, # or for Graph() since it is done implicitly (parallel edges not allowed) seen=set() for u,nbrs in d.items(): for v,data in nbrs.items(): if (u,v) not in seen: G.add_edge(u,v,attr_dict=data) seen.add((v,u)) else: G.add_edges_from( ( (u,v,data) for u,nbrs in d.items() for v,data in nbrs.items()) ) return G def to_edgelist(G,nodelist=None): """Return a list of edges in the graph. Parameters ---------- G : graph A NetworkX graph nodelist : list Use only nodes specified in nodelist """ if nodelist is None: return G.edges(data=True) else: return G.edges(nodelist,data=True) def from_edgelist(edgelist,create_using=None): """Return a graph from a list of edges. Parameters ---------- edgelist : list or iterator Edge tuples create_using : NetworkX graph Use specified graph for result. Otherwise a new graph is created. Examples -------- >>> edgelist= [(0,1)] # single edge (0,1) >>> G=nx.from_edgelist(edgelist) or >>> G=nx.Graph(edgelist) # use Graph constructor """ G=_prep_create_using(create_using) G.add_edges_from(edgelist) return G def to_numpy_matrix(G, nodelist=None, dtype=None, order=None, multigraph_weight=sum, weight='weight'): """Return the graph adjacency matrix as a NumPy matrix. Parameters ---------- G : graph The NetworkX graph used to construct the NumPy matrix. nodelist : list, optional The rows and columns are ordered according to the nodes in `nodelist`. If `nodelist` is None, then the ordering is produced by G.nodes(). dtype : NumPy data type, optional A valid single NumPy data type used to initialize the array. This must be a simple type such as int or numpy.float64 and not a compound data type (see to_numpy_recarray) If None, then the NumPy default is used. order : {'C', 'F'}, optional Whether to store multidimensional data in C- or Fortran-contiguous (row- or column-wise) order in memory. If None, then the NumPy default is used. multigraph_weight : {sum, min, max}, optional An operator that determines how weights in multigraphs are handled. The default is to sum the weights of the multiple edges. weight : string or None optional (default='weight') The edge attribute that holds the numerical value used for the edge weight. If None then all edge weights are 1. Returns ------- M : NumPy matrix Graph adjacency matrix. See Also -------- to_numpy_recarray, from_numpy_matrix Notes ----- The matrix entries are assigned with weight edge attribute. When an edge does not have the weight attribute, the value of the entry is 1. For multiple edges, the values of the entries are the sums of the edge attributes for each edge. When `nodelist` does not contain every node in `G`, the matrix is built from the subgraph of `G` that is induced by the nodes in `nodelist`. Examples -------- >>> G = nx.MultiDiGraph() >>> G.add_edge(0,1,weight=2) >>> G.add_edge(1,0) >>> G.add_edge(2,2,weight=3) >>> G.add_edge(2,2) >>> nx.to_numpy_matrix(G, nodelist=[0,1,2]) matrix([[ 0., 2., 0.], [ 1., 0., 0.], [ 0., 0., 4.]]) """ try: import numpy as np except ImportError: raise ImportError(\ "to_numpy_matrix() requires numpy: http://scipy.org/ ") if nodelist is None: nodelist = G.nodes() nodeset = set(nodelist) if len(nodelist) != len(nodeset): msg = "Ambiguous ordering: `nodelist` contained duplicates." 
raise nx.NetworkXError(msg) nlen=len(nodelist) undirected = not G.is_directed() index=dict(zip(nodelist,range(nlen))) if G.is_multigraph(): # Handle MultiGraphs and MultiDiGraphs # array of nan' to start with, any leftover nans will be converted to 0 # nans are used so we can use sum, min, max for multigraphs M = np.zeros((nlen,nlen), dtype=dtype, order=order)+np.nan # use numpy nan-aware operations operator={sum:np.nansum, min:np.nanmin, max:np.nanmax} try: op=operator[multigraph_weight] except: raise ValueError('multigraph_weight must be sum, min, or max') for u,v,attrs in G.edges_iter(data=True): if (u in nodeset) and (v in nodeset): i,j = index[u],index[v] e_weight = attrs.get(weight, 1) M[i,j] = op([e_weight,M[i,j]]) if undirected: M[j,i] = M[i,j] # convert any nans to zeros M = np.asmatrix(np.nan_to_num(M)) else: # Graph or DiGraph, this is much faster than above M = np.zeros((nlen,nlen), dtype=dtype, order=order) for u,nbrdict in G.adjacency_iter(): for v,d in nbrdict.items(): try: M[index[u],index[v]]=d.get(weight,1) except KeyError: pass M = np.asmatrix(M) return M def from_numpy_matrix(A,create_using=None): """Return a graph from numpy matrix. The numpy matrix is interpreted as an adjacency matrix for the graph. Parameters ---------- A : numpy matrix An adjacency matrix representation of a graph create_using : NetworkX graph Use specified graph for result. The default is Graph() Notes ----- If the numpy matrix has a single data type for each matrix entry it will be converted to an appropriate Python data type. If the numpy matrix has a user-specified compound data type the names of the data fields will be used as attribute keys in the resulting NetworkX graph. See Also -------- to_numpy_matrix, to_numpy_recarray Examples -------- Simple integer weights on edges: >>> import numpy >>> A=numpy.matrix([[1,1],[2,1]]) >>> G=nx.from_numpy_matrix(A) User defined compound data type on edges: >>> import numpy >>> dt=[('weight',float),('cost',int)] >>> A=numpy.matrix([[(1.0,2)]],dtype=dt) >>> G=nx.from_numpy_matrix(A) >>> G.edges(data=True) [(0, 0, {'cost': 2, 'weight': 1.0})] """ kind_to_python_type={'f':float, 'i':int, 'u':int, 'b':bool, 'c':complex, 'S':str, 'V':'void'} try: # Python 3.x blurb = chr(1245) # just to trigger the exception kind_to_python_type['U']=str except ValueError: # Python 2.6+ kind_to_python_type['U']=unicode # This should never fail if you have created a numpy matrix with numpy... try: import numpy as np except ImportError: raise ImportError(\ "from_numpy_matrix() requires numpy: http://scipy.org/ ") G=_prep_create_using(create_using) n,m=A.shape if n!=m: raise nx.NetworkXError("Adjacency matrix is not square.", "nx,ny=%s"%(A.shape,)) dt=A.dtype try: python_type=kind_to_python_type[dt.kind] except: raise TypeError("Unknown numpy data type: %s"%dt) # make sure we get isolated nodes G.add_nodes_from(range(n)) # get a list of edges x,y=np.asarray(A).nonzero() # handle numpy constructed data type if python_type is 'void': fields=sorted([(offset,dtype,name) for name,(dtype,offset) in A.dtype.fields.items()]) for (u,v) in zip(x,y): attr={} for (offset,dtype,name),val in zip(fields,A[u,v]): attr[name]=kind_to_python_type[dtype.kind](val) G.add_edge(u,v,attr) else: # basic data type G.add_edges_from( ((u,v,{'weight':python_type(A[u,v])}) for (u,v) in zip(x,y)) ) return G def to_numpy_recarray(G,nodelist=None, dtype=[('weight',float)], order=None): """Return the graph adjacency matrix as a NumPy recarray. 
Parameters ---------- G : graph The NetworkX graph used to construct the NumPy matrix. nodelist : list, optional The rows and columns are ordered according to the nodes in `nodelist`. If `nodelist` is None, then the ordering is produced by G.nodes(). dtype : NumPy data-type, optional A valid NumPy named dtype used to initialize the NumPy recarray. The data type names are assumed to be keys in the graph edge attribute dictionary. order : {'C', 'F'}, optional Whether to store multidimensional data in C- or Fortran-contiguous (row- or column-wise) order in memory. If None, then the NumPy default is used. Returns ------- M : NumPy recarray The graph with specified edge data as a Numpy recarray Notes ----- When `nodelist` does not contain every node in `G`, the matrix is built from the subgraph of `G` that is induced by the nodes in `nodelist`. Examples -------- >>> G = nx.Graph() >>> G.add_edge(1,2,weight=7.0,cost=5) >>> A=nx.to_numpy_recarray(G,dtype=[('weight',float),('cost',int)]) >>> print(A.weight) [[ 0. 7.] [ 7. 0.]] >>> print(A.cost) [[0 5] [5 0]] """ try: import numpy as np except ImportError: raise ImportError(\ "to_numpy_matrix() requires numpy: http://scipy.org/ ") if G.is_multigraph(): raise nx.NetworkXError("Not implemented for multigraphs.") if nodelist is None: nodelist = G.nodes() nodeset = set(nodelist) if len(nodelist) != len(nodeset): msg = "Ambiguous ordering: `nodelist` contained duplicates." raise nx.NetworkXError(msg) nlen=len(nodelist) undirected = not G.is_directed() index=dict(zip(nodelist,range(nlen))) M = np.zeros((nlen,nlen), dtype=dtype, order=order) names=M.dtype.names for u,v,attrs in G.edges_iter(data=True): if (u in nodeset) and (v in nodeset): i,j = index[u],index[v] values=tuple([attrs[n] for n in names]) M[i,j] = values if undirected: M[j,i] = M[i,j] return M.view(np.recarray) def to_scipy_sparse_matrix(G, nodelist=None, dtype=None, weight='weight', format='csr'): """Return the graph adjacency matrix as a SciPy sparse matrix. Parameters ---------- G : graph The NetworkX graph used to construct the NumPy matrix. nodelist : list, optional The rows and columns are ordered according to the nodes in `nodelist`. If `nodelist` is None, then the ordering is produced by G.nodes(). dtype : NumPy data-type, optional A valid NumPy dtype used to initialize the array. If None, then the NumPy default is used. weight : string or None optional (default='weight') The edge attribute that holds the numerical value used for the edge weight. If None then all edge weights are 1. format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'} The type of the matrix to be returned (default 'csr'). For some algorithms different implementations of sparse matrices can perform better. See [1]_ for details. Returns ------- M : SciPy sparse matrix Graph adjacency matrix. Notes ----- The matrix entries are populated using the edge attribute held in parameter weight. When an edge does not have that attribute, the value of the entry is 1. For multiple edges the matrix values are the sums of the edge weights. When `nodelist` does not contain every node in `G`, the matrix is built from the subgraph of `G` that is induced by the nodes in `nodelist`. Uses coo_matrix format. To convert to other formats specify the format= keyword. 
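
    For instance (an illustrative sketch), the LIL format can be requested
    directly::

        A = nx.to_scipy_sparse_matrix(G, format='lil')

    which is the same as building the coo_matrix and then converting it
    with A.asformat('lil').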
Examples -------- >>> G = nx.MultiDiGraph() >>> G.add_edge(0,1,weight=2) >>> G.add_edge(1,0) >>> G.add_edge(2,2,weight=3) >>> G.add_edge(2,2) >>> S = nx.to_scipy_sparse_matrix(G, nodelist=[0,1,2]) >>> print(S.todense()) [[0 2 0] [1 0 0] [0 0 4]] References ---------- .. [1] Scipy Dev. References, "Sparse Matrices", http://docs.scipy.org/doc/scipy/reference/sparse.html """ try: from scipy import sparse except ImportError: raise ImportError(\ "to_scipy_sparse_matrix() requires scipy: http://scipy.org/ ") if nodelist is None: nodelist = G nlen = len(nodelist) if nlen == 0: raise nx.NetworkXError("Graph has no nodes or edges") if len(nodelist) != len(set(nodelist)): msg = "Ambiguous ordering: `nodelist` contained duplicates." raise nx.NetworkXError(msg) index = dict(zip(nodelist,range(nlen))) if G.number_of_edges() == 0: row,col,data=[],[],[] else: row,col,data=zip(*((index[u],index[v],d.get(weight,1)) for u,v,d in G.edges_iter(nodelist, data=True) if u in index and v in index)) if G.is_directed(): M = sparse.coo_matrix((data,(row,col)),shape=(nlen,nlen), dtype=dtype) else: # symmetrize matrix M = sparse.coo_matrix((data+data,(row+col,col+row)),shape=(nlen,nlen), dtype=dtype) try: return M.asformat(format) except AttributeError: raise nx.NetworkXError("Unknown sparse matrix format: %s"%format) def from_scipy_sparse_matrix(A,create_using=None): """Return a graph from scipy sparse matrix adjacency list. Parameters ---------- A : scipy sparse matrix An adjacency matrix representation of a graph create_using : NetworkX graph Use specified graph for result. The default is Graph() Examples -------- >>> import scipy.sparse >>> A=scipy.sparse.eye(2,2,1) >>> G=nx.from_scipy_sparse_matrix(A) """ G=_prep_create_using(create_using) # convert all formats to lil - not the most efficient way AA=A.tolil() n,m=AA.shape if n!=m: raise nx.NetworkXError(\ "Adjacency matrix is not square. nx,ny=%s"%(A.shape,)) G.add_nodes_from(range(n)) # make sure we get isolated nodes for i,row in enumerate(AA.rows): for pos,j in enumerate(row): G.add_edge(i,j,**{'weight':AA.data[i][pos]}) return G # fixture for nose tests def setup_module(module): from nose import SkipTest try: import numpy except: raise SkipTest("NumPy not available") try: import scipy except: raise SkipTest("SciPy not available") networkx-1.8.1/setup_egg.py0000775000175000017500000000267412177456333015645 0ustar aricaric00000000000000 #!/usr/bin/env python """ An alternate setup.py script that uses setuptools. You can install networkx with python setup_egg.py install If you have setuptools and run this as python setup_egg.py bdist_egg you will get a Python egg. Use python setup_egg.py nosetests to run the tests. 
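
An equivalent run from an interactive Python session (a sketch only; it
assumes nose is importable and the argument list shown is illustrative):

    import nose
    nose.run(argv=['nosetests', 'networkx'])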
""" # local import, might need modification for 2.6/3.0 from setup import * # must occur after local import to override distutils.core.setup from setuptools import setup if __name__ == "__main__": setup( name = release.name.lower(), version = version, maintainer = release.maintainer, maintainer_email = release.maintainer_email, author = release.authors['Hagberg'][0], author_email = release.authors['Hagberg'][1], description = release.description, keywords = release.keywords, long_description = release.long_description, license = release.license, platforms = release.platforms, url = release.url, download_url = release.download_url, classifiers = release.classifiers, packages = packages, data_files = data, package_data = package_data, install_requires=['setuptools'], test_suite = 'nose.collector', tests_require = ['nose >= 0.10.1'] , zip_safe = False ) networkx-1.8.1/examples/0000775000175000017500000000000012177457361015115 5ustar aricaric00000000000000networkx-1.8.1/examples/javascript/0000775000175000017500000000000012177457361017263 5ustar aricaric00000000000000networkx-1.8.1/examples/javascript/http_server.py0000664000175000017500000000415412177456333022204 0ustar aricaric00000000000000# helper to load url # runs webserver and loads url with webbrowswer module import sys def load_url(path): PORT = 8000 httpd = StoppableHTTPServer(("127.0.0.1",PORT), handler) thread.start_new_thread(httpd.serve, ()) webbrowser.open_new('http://localhost:%s/%s'%(PORT,path)) input("Press to stop server\n") httpd.stop() print("To restart server run: \n%s"%server) if sys.version_info[0] == 2: import SimpleHTTPServer, BaseHTTPServer import socket import thread import webbrowser handler = SimpleHTTPServer.SimpleHTTPRequestHandler input = raw_input server = "python -m SimpleHTTPServer 8000" class StoppableHTTPServer(BaseHTTPServer.HTTPServer): def server_bind(self): BaseHTTPServer.HTTPServer.server_bind(self) self.socket.settimeout(1) self.run = True def get_request(self): while self.run: try: sock, addr = self.socket.accept() sock.settimeout(None) return (sock, addr) except socket.timeout: pass def stop(self): self.run = False def serve(self): while self.run: self.handle_request() else: import http.server, http.server import socket import _thread as thread import webbrowser handler = http.server.SimpleHTTPRequestHandler server = "python -m http.server 8000" class StoppableHTTPServer(http.server.HTTPServer): def server_bind(self): http.server.HTTPServer.server_bind(self) self.socket.settimeout(1) self.run = True def get_request(self): while self.run: try: sock, addr = self.socket.accept() sock.settimeout(None) return (sock, addr) except socket.timeout: pass def stop(self): self.run = False def serve(self): while self.run: self.handle_request() networkx-1.8.1/examples/javascript/force.py0000664000175000017500000000172712177456333020740 0ustar aricaric00000000000000"""Example of writing JSON format graph data and using the D3 Javascript library to produce an HTML/Javascript drawing. """ # Copyright (C) 2011-2012 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
__author__ = """Aric Hagberg """ import json import networkx as nx from networkx.readwrite import json_graph import http_server G = nx.barbell_graph(6,3) # this d3 example uses the name attribute for the mouse-hover value, # so add a name to each node for n in G: G.node[n]['name'] = n # write json formatted data d = json_graph.node_link_data(G) # node-link format to serialize # write json json.dump(d, open('force/force.json','w')) print('Wrote node-link JSON data to force/force.json') # open URL in running web browser http_server.load_url('force/force.html') print('Or copy all files in force/ to webserver and load force/force.html') networkx-1.8.1/examples/pygraphviz/0000775000175000017500000000000012177457361017320 5ustar aricaric00000000000000networkx-1.8.1/examples/pygraphviz/write_dotfile.py0000664000175000017500000000164612177456333022537 0ustar aricaric00000000000000#!/usr/bin/env python """ Write a dot file from a networkx graph for further processing with graphviz. You need to have either pygraphviz or pydot for this example. See https://networkx.lanl.gov/drawing.html for more info. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as NX # and the following code block is not needed # but we want to see which module is used and # if and why it fails try: m=NX.drawing.write_dot.__module__ except: print print "pygraphviz or pydot were not found " print "see https://networkx.lanl.gov/Drawing.html for info" print raise print "using module", m G=NX.grid_2d_graph(5,5) # 5x5 grid NX.write_dot(G,"grid.dot") print "Now run: neato -Tps grid.dot >grid.ps" networkx-1.8.1/examples/pygraphviz/pygraphviz_attributes.py0000664000175000017500000000177312177456333024351 0ustar aricaric00000000000000#!/usr/bin/env python """ An example showing how to use the interface to the pygraphviz AGraph class to convert to and from graphviz. Also see the pygraphviz documentation and examples at https://networkx.lanl.gov/pygraphviz/ """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2006-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx # networkx graph G=nx.Graph() # ad edges with red color G.add_edge(1,2,color='red') G.add_edge(2,3,color='red') # add nodes 3 and 4 G.add_node(3) G.add_node(4) # convert to a graphviz agraph A=nx.to_agraph(G) # write to dot file A.write('k5_attributes.dot') # convert back to networkx Graph with attributes on edges and # default attributes as dictionary data X=nx.from_agraph(A) print("edges") print(X.edges(data=True)) print("default graph attributes") print(X.graph) print("node node attributes") print(X.node) networkx-1.8.1/examples/pygraphviz/pygraphviz_simple.py0000664000175000017500000000152612177456333023450 0ustar aricaric00000000000000#!/usr/bin/env python """ An example showing how to use the interface to the pygraphviz AGraph class to convert to and from graphviz. Also see the pygraphviz documentation and examples at https://networkx.lanl.gov/pygraphviz/ """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
from networkx import * # plain graph G=complete_graph(5) # start with K5 in networkx A=to_agraph(G) # convert to a graphviz graph X1=from_agraph(A) # convert back to networkx (but as Graph) X2=Graph(A) # fancy way to do conversion G1=Graph(X1) # now make it a Graph A.write('k5.dot') # write to dot file X3=read_dot('k5.dot') # read from dotfile networkx-1.8.1/examples/pygraphviz/pygraphviz_draw.py0000664000175000017500000000127212177456333023112 0ustar aricaric00000000000000#!/usr/bin/env python """ An example showing how to use the interface to the pygraphviz AGraph class to draw a graph. Also see the pygraphviz documentation and examples at https://networkx.lanl.gov/pygraphviz/ """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx import * # plain graph G=complete_graph(5) # start with K5 in networkx A=to_agraph(G) # convert to a graphviz graph A.layout() # neato layout A.draw("k5.ps") # write postscript in k5.ps with neato layout networkx-1.8.1/examples/basic/0000775000175000017500000000000012177457361016176 5ustar aricaric00000000000000networkx-1.8.1/examples/basic/read_write.py0000664000175000017500000000132312177456333020672 0ustar aricaric00000000000000#!/usr/bin/env python """ Read and write graphs. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx import * import sys G=grid_2d_graph(5,5) # 5x5 grid try: # Python 2.6+ write_adjlist(G,sys.stdout) # write adjacency list to screen except TypeError: # Python 3.x write_adjlist(G,sys.stdout.buffer) # write adjacency list to screen # write edgelist to grid.edgelist write_edgelist(G,path="grid.edgelist",delimiter=":") # read edgelist from grid.edgelist H=read_edgelist(path="grid.edgelist",delimiter=":") networkx-1.8.1/examples/basic/properties.py0000664000175000017500000000205712177456333020746 0ustar aricaric00000000000000#!/usr/bin/env python """ Compute some network properties for the lollipop graph. """ # Copyright (C) 2004 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
from networkx import * G = lollipop_graph(4,6) pathlengths=[] print("source vertex {target:length, }") for v in G.nodes(): spl=single_source_shortest_path_length(G,v) print('%s %s' % (v,spl)) for p in spl.values(): pathlengths.append(p) print('') print("average shortest path length %s" % (sum(pathlengths)/len(pathlengths))) # histogram of path lengths dist={} for p in pathlengths: if p in dist: dist[p]+=1 else: dist[p]=1 print('') print("length #paths") verts=dist.keys() for d in sorted(verts): print('%s %d' % (d,dist[d])) print("radius: %d" % radius(G)) print("diameter: %d" % diameter(G)) print("eccentricity: %s" % eccentricity(G)) print("center: %s" % center(G)) print("periphery: %s" % periphery(G)) print("density: %s" % density(G)) networkx-1.8.1/examples/multigraph/0000775000175000017500000000000012177457361017271 5ustar aricaric00000000000000networkx-1.8.1/examples/multigraph/chess_masters.py0000664000175000017500000001202412177456333022503 0ustar aricaric00000000000000#!/usr/bin/env python """ An example of the MultiDiGraph clas The function chess_pgn_graph reads a collection of chess matches stored in the specified PGN file (PGN ="Portable Game Notation") Here the (compressed) default file --- chess_masters_WCC.pgn.bz2 --- contains all 685 World Chess Championship matches from 1886 - 1985. (data from http://chessproblem.my-free-games.com/chess/games/Download-PGN.php) The chess_pgn_graph() function returns a MultiDiGraph with multiple edges. Each node is the last name of a chess master. Each edge is directed from white to black and contains selected game info. The key statement in chess_pgn_graph below is G.add_edge(white, black, game_info) where game_info is a dict describing each game. """ # Copyright (C) 2006-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx # tag names specifying what game info should be # stored in the dict on each digraph edge game_details=["Event", "Date", "Result", "ECO", "Site"] def chess_pgn_graph(pgn_file="chess_masters_WCC.pgn.bz2"): """Read chess games in pgn format in pgn_file. Filenames ending in .gz or .bz2 will be uncompressed. Return the MultiDiGraph of players connected by a chess game. Edges contain game data in a dict. 
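
    A short usage sketch, reading the default file shipped with this
    example::

        G = chess_pgn_graph()
        print('%d games between %d players' % (G.number_of_edges(),
                                               G.number_of_nodes()))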
""" import bz2 G=nx.MultiDiGraph() game={} datafile = bz2.BZ2File(pgn_file) lines = (line.decode().rstrip('\r\n') for line in datafile) for line in lines: if line.startswith('['): tag,value=line[1:-1].split(' ',1) game[str(tag)]=value.strip('"') else: # empty line after tag set indicates # we finished reading game info if game: white=game.pop('White') black=game.pop('Black') G.add_edge(white, black, **game) game={} return G if __name__ == '__main__': import networkx as nx G=chess_pgn_graph() ngames=G.number_of_edges() nplayers=G.number_of_nodes() print("Loaded %d chess games between %d players\n"\ % (ngames,nplayers)) # identify connected components # of the undirected version Gcc=nx.connected_component_subgraphs(G.to_undirected()) if len(Gcc)>1: print("Note the disconnected component consisting of:") print(Gcc[1].nodes()) # find all games with B97 opening (as described in ECO) openings=set([game_info['ECO'] for (white,black,game_info) in G.edges(data=True)]) print("\nFrom a total of %d different openings,"%len(openings)) print('the following games used the Sicilian opening') print('with the Najdorff 7...Qb6 "Poisoned Pawn" variation.\n') for (white,black,game_info) in G.edges(data=True): if game_info['ECO']=='B97': print(white,"vs",black) for k,v in game_info.items(): print(" ",k,": ",v) print("\n") try: import matplotlib.pyplot as plt except ImportError: import sys print("Matplotlib needed for drawing. Skipping") sys.exit(0) # make new undirected graph H without multi-edges H=nx.Graph(G) # edge width is proportional number of games played edgewidth=[] for (u,v,d) in H.edges(data=True): edgewidth.append(len(G.get_edge_data(u,v))) # node size is proportional to number of games won wins=dict.fromkeys(G.nodes(),0.0) for (u,v,d) in G.edges(data=True): r=d['Result'].split('-') if r[0]=='1': wins[u]+=1.0 elif r[0]=='1/2': wins[u]+=0.5 wins[v]+=0.5 else: wins[v]+=1.0 try: pos=nx.graphviz_layout(H) except: pos=nx.spring_layout(H,iterations=20) plt.rcParams['text.usetex'] = False plt.figure(figsize=(8,8)) nx.draw_networkx_edges(H,pos,alpha=0.3,width=edgewidth, edge_color='m') nodesize=[wins[v]*50 for v in H] nx.draw_networkx_nodes(H,pos,node_size=nodesize,node_color='w',alpha=0.4) nx.draw_networkx_edges(H,pos,alpha=0.4,node_size=0,width=1,edge_color='k') nx.draw_networkx_labels(H,pos,fontsize=14) font = {'fontname' : 'Helvetica', 'color' : 'k', 'fontweight' : 'bold', 'fontsize' : 14} plt.title("World Chess Championship Games: 1886 - 1985", font) # change font and write text (using data coordinates) font = {'fontname' : 'Helvetica', 'color' : 'r', 'fontweight' : 'bold', 'fontsize' : 14} plt.text(0.5, 0.97, "edge width = # games played", horizontalalignment='center', transform=plt.gca().transAxes) plt.text(0.5, 0.94, "node size = # games won", horizontalalignment='center', transform=plt.gca().transAxes) plt.axis('off') plt.savefig("chess_masters.png",dpi=75) print("Wrote chess_masters.png") plt.show() # display networkx-1.8.1/examples/multigraph/chess_masters_WCC.pgn.bz20000664000175000017500000030360012177456333024032 0ustar aricaric00000000000000BZh91AY&SY[64ª½ß€Xÿâ¿ÿÿº?ÿßðb‚>:ð@ù!L#ï§ÐÛ2R’ETêð¼ÕU“V°4F™ KAU ÔƒJÈi[Þ}Ï+ël*MÅ[e6ÃZѪ¬)m[w©÷ߪ‰öM1EQ E ©"¨U÷ÄûƒÉ"•JPDHTD› ¬èU•ø”Ü ހΠž1nGtÝ÷{Ù zÕd;·vÇaqÎÖ#®]S]2xæãÝm›IU ¥)BézáÞËÕ]ÕÖš¯¯}Ïž½>²›7ͪ€ô-†BKåQuv³Y”‘@"SM[PÔXÚVÍS•QgÇ]gß8tK6©m™tä<Þ§¾îï°Ovh6ŽçT:¶5ؾú¹aó¦”îõ÷×:5Ü:‘]ØûÚ¦¥ 4I{6¼<òT*^Úßc«ëÔµ€€hPhP€$©Z€϶7|}³î Tí‚Ñ¢M³­}óí÷¾©ª µ(UkTmî:»×¹±ëx=6os§`£  e[·ƒà@P¾¬ûé*}hˆ)ènÙd6e*P4ä}Ï.õ[ãomµ*ª¦†6"Ç|‰ØiƒïD Uõ¨@6i³Oc¢ì 
3[ªû^×kˆt‘¢nBÇjp;¡ÛÞ¹:]~ÛjÎèý»^yÞZº–ϧµn¯Iw¨T6áÃá=é \\(Û¬ó¾Ý¥õ^²ØžzÏkB÷'\•˶U¾$ꤡÐIÕZuDíc޲¢Ø›s­EÖÈð!;¥»Z¥V"ú%º^;SÝn[Fí‘QQwRÊœµz1l'T9ÈU‹m£×5GluîI³°kDÕ$ã"-R9Àjª•¿–IöxÕ>slÒWE ÓM²¶Ÿ̥߮BuQt‹¤ÒÙϨU7qí ZƼ['£{Õ³ ìÝL1¶–Ô/G³´çÛm¬#Ö½ló±U$buì}]Üw3x@ù®Ûײ¹<Üz>‹Ò¯@í´º5/MH‰^W®­–nQ3ì+dTJYH±F›PN.•lBÞÔS¦N1»â¢Ç»ÒòuxLSN8£^$¤%.©¬É8ª u½ë175ûT„0ۼɽ›}FI]lèZÉxxšcÉ­ça‘ÄVÉÖík„›ç¥ê[k`ãhLLÖên’ÛÏp3§J º´F€HÑëñ—!¥^ÔÔ~G¢©Ý2ûëÀkÙ°M#5u}׉½¬RVýݳXò™bæÃÐ-bOné|U¯ó´<ˆá¶*è™õf¶§Hr•¥CÝÓ½íË¢f3­SEG€n³¢ñOœ,;drÂrÓ–’$Ÿ¯fjÙÜ“=­¶IeYõ*I›ÍF2"à ‡wvM S4É fï[±Œ‹µ1OžævM\ÿ<˜ýÄíÐ'ø€ÿdOΠñU?Á^ÀöP÷‚~ÐÁD;ÀOت'p‰(~µôOH'Ȳ x€§ƒÿ‚íQù«íQ>Bˆ ñ}ÜÛ]otÉÅÿSþ ´s2öß¿ïXøuyžhnë)ÈFëd¿nêš –sÐgÜ íÆµ[ó²ío>ÕTt¹F„dM?]"_Œòj•§©âéÜê'ÖÔG¦a+M€Ž™“‰n-Þʲ s8px·ˆÔwÆÀG™ß>Eš‚º+Äpaíq} ¹Úòz‡œß+@´A¸5‹ BçRcicí¹cŠû>gnÊ»5¶º'fý°\Ïsªˆ #¾ ‘é¹Í“ª‰—,^‚ËXaJ {aê×<áèèéëŒÈq×’sõÒ/wžöûÄ4 ŽšÑ:5_ŒYj£¹¨XݶkÆoIˆ÷S¶ûz÷SØòûaÎr…ËÛÙˆ@ÆŽ:ñ?ž%ñìa—º(Õ„wF}™7Ü=Å|„N­ÚÁPmC ‡°RNè2_/½ˆ#­ÉÚǾ+àu—>i,6–‘Ò[ß'Þ+Dñ+Çß%½‰½_j”$Ñ::>Ø¿[¹Œ¤"ë}dóŽ‚O»xQ^;êÏyvMBkÅ ·x\+W4ÎmG»–:›I­¹j-FõR~çloÍÑ_G>ýûªŸj-`ÆŸ±zv`ÞÌèÉFE\L™Lš*H   Û÷¯Ò÷ï}ö›P3|6Þæ' ß±bnË-¶ÛmòÄ£Û®<^ƒqµ™žõ øÙÍ’ñ¹V÷O¬v­ÛlL!¹™Ÿ¾ý÷èóZ~OÇWžjœÂûi d…´E…·° bh‘„Š.ðÇ"h¨"‚e!HEQZ©2Û`Œ¶ý™ŠíC«k×±íªZl¥Ö ÒYôûU+¥ÌYíwŶ"”R'bNËi}‰`­ª!ʧ€FʱH„ŸYpHMH5cÚ(û»©ËwµU»]ãeí_°I „°Ì@ÁíÆÈáK¸Ï+¯¯ϰ²ÑÌr#ª^'oTî;t5çvk»e¶Â°GÃîC¯Àº 8lúµÉ$Š’þøR#Ç×4å|UUXÞp¹nü©>{ðÞ¾DUU (ª(ÉyÔÇçΟ%æxUô[ZÇì€S2¸f«iRäfýæjduïÉ$·ž«Â×êîÉkÛeÝ™­Úí{».bѪӃµZ¥dv'e•X,J<a€1,ˆÈÁ°(V§Ž—ßhký¿4}ÍôÛBÛhä²Mvº"ÝÙ´nÊRÛÑ$LMÈ9÷»=£¦³E5úú¹–Öm­TUb”skóg½ó¤]¢™¨a›ð|g§Ì%Ó+(ðÚ(0dˆB )˽;vïËÃí=ñø¿|ð0 –̈zn6:p=Rˆ‘¡~ø$Glp8̼^¥¹™÷ÀQõÑ^^ò‘òz–°ÄvšÃÇÉ®fAι(ß|8ô6¸Ìg?·2r؃®†ëÀà%Ó›E ‹Xà_u„º+•öç˲à«vØQW)$ჰ!š®ž⻆=Ì~SsþªB¢©dl‹”âAù/Ãà‡ÃಟÝïg”ŸŽg“·½M—cõCf¼œÙyØð:—©ú‚¹»7L»áß "¯¾y_…ìò¾Û äÞ2ß´Më”jë׊¦\FrÂÉñßn²Kî»ãÇ3ÚàXW[N‰ìûDÉ¡º&öä¾éN‹¬-'ÇǪ̈M“Æ´8ÎÎ>Ûå½|íÝc½ÆÞòk½¦QÁ`£x0㾈˜ç³t}Ež:ÓòÛ6ìÜšPó¾ø|ô‘Žy ëxŽÁÌáÊ[Ü÷¯†bø=L Ü w½žÞÚx,ûáóÈî:·#]½-BDÃâ@ï}ã@ƒ$[É3QƒÊzï^>ÛÃ×»îxç_i$¤±¯N)w­ñ·»ŽŽö¿ 'Ëác7x?j—½ˆè㯽—½Äçz¨o•/½Í1(7—¢ôNk÷³3ÚGWö`ëVûi¯G'‹É½4ÍÑ{=ÇWóL)ÕK=šÀûïqk{.¯fßO gªmð}@"í’ü¼¢Í22[g¾Ÿboˆ_}¿;ÇËÞžï(ûÔ*Iò^ï,VÖD²<è%}·NÜOåŠ<ì â÷½tÎÞÎö¡ž›·²^õÕdÀ¾½‹‚ðõ ½(¹ôûãÛ3Äî3áí3}žïUÝô{á¡Þ5ø,OE¢‡.@üó“8ºîûï¯Þσè#Þîž÷vÃ}‹ öÎïjíLÁSÁAyo<6÷'Äöç”]y{šËÌ^  G‡Y¹Föq ÷€Ñóv[C/ì òä®#W?¾WåîÐïw™/ÃÎ~Î~ß =sºž=CÀãÑVïÞ·b­¾ôßw¥Ø5)!£q?³‡¤JK¯†ÀŸÝèµûrß{¹Ý‰WÀ÷¶eò"1“|]]…æjK9ùßc£jÏuôwY͸~Ãö_B÷¯6­Ç=hóӑ윴õÝG{!^ÞY:üõ:æ£89>Á²ýÓ¼·yƒª;ÍÛÙÙÃŒ 1¡áìb¼ƒ–ïà ¼þ´,cÞïw¸èG'2S|ȞǪ?,dŹ7q³°?kîk(ñ? 
-Õ¸QÄ> rûÛÍr§¼OZVS4p^Yª}y¡¿%§Ú„Ý¢Ü j-#|î÷{O²ï”+ÄÙà;Ä´·ÄëʾçÌ›“qý~èpêùg’Å9é I<;XˆyˆIeÄò¯t2ÏM[ï Ê{ùk4U¿w‘ë@_{ÒäÍÞIˆûÞû5Ù÷<¯ÙŠ3Ì]ײᯬ$rž™Œ-°Z3wr`>‹Ï-ùûMn‰Þx Gv¶úÉìÎ ›×†{šõšÜ’_y½Üôó½eOͱœÁãŸ*Ù§sw¯:üóE|ÖPô¿?LÌåšSÒÜøµÛÃ^Î[Ÿ{´ö*¼s°ù ï7Àb]èQW¼|~(H”·7Þ†ù`nöñžIä÷³7Ò-y¼@ßAïo½£³ÞYw—VŠ=Ù£Þˆ.˜¸3…@ufË¢ÇF{êz¨öМ#¾8´NñÕõ÷$ñ>9z‡&{ç±O/ ñàÁ¼ýšÃ~\Ìf}Òã8œâÆ÷»zÁ†sØM/ƒ ùKï /1gw¹ð‘:–du6[mÂL‚4ÜÑéä7&ï·6ØòÑFNU¡Ÿh#î¼/ß!ZÍyï]È!ôäá¬;¿>×= 2CØýëóç í.ä4ÈyÅá‚ vzïÏÇ=ïx¯½D{}ØDáôYßNôŸiûàÓ~j·÷‡‡·~Ùï€ù†/†I;Ü<$§ÎyLz=Îc·™Ó›»…¼ U §ê¼ŽòÛôhg¾d1ÀÞ{ÃÜî_5>÷«® ºnÏNW¡ÐÇž¤¸éÑqç¾P{Ë ‚weÀö=.Þ+Ú}ï½Û=ï/yÔ“9™ç^o¯ˆ>í÷œ³Þà·v-!ç¤ÖvævcQáG„ ¦vö좬Yî‘g‰~gÞ(y¹gîFàCÓºÔÇ_Wìx|ÕZEÌly—ßtAÓï¯w·â÷ÜÇ:7Ý÷Áfb{¾˜qIsÓìõ¾ofZ99CñÜ϶śÞëð(Fû ÷Ã=ðoÓn¾£ïwéã¼3‹{Ó(Ôü¼¬ƒ)»í_eñù 篃p]ï/A¶äcÞÏ‘¦E„`îñÃà <½—N?)OØf‘0­úž/ŽtöjKIeïw¹+HÔéŚО úßxæöX{^S|´vú&¸úÝÏ47Æ“ôú¯ox¯JfÖq-èõGž}½Ú®9Üïsk•6V=Ó=ï{ÏÉìòÏyWãžO8ö¿Qëäê0™F‰ûw{Ùî(ô^+ž›©C7Ù–®hcòë¡fdÑá=éÚµ$½iô.kç·³[w'ªÁnÞxýtN¨ßaC’½›½iæ­§,y-'q™‹Vk˜Î/'ìÖŽÑ1ü{ww·Ü_iF™ˆžffüqí¾Ö»2Ežòª4ùÚ¸õ¹š}Èk‡sÍÕÑÓëìå«3ØœŒ,3÷3Ùå7Þ%Œ÷·$#³7g{ÞÁŒ¯½+ÜhôšOžy•ÌÅØ‡U¡û{sYeSµÌ>g|³BßxDT­mð¾ «ñßó¾öŒÇy–=ÓÝãõñLösõ-áÕy3rKïb®O ¤q¾ÒÑÞ943áÑvbÏyNö̯Iëåܪ+Q3ÔèØs¨êpÓ±¬î¸¯7åâ.äÚ„yîòŠ3ÜsïG9pÍs=àõæérlÇÛ ä{ÄúØý™œ}eT²Õ•JLqã¾Ò âÉ[óA¾›T=&˜÷ÆH³;—Dç¦ûB¬õÔƒÇÛÝu@ƒ¾d÷–p‹}YÏ#e2Ñ/½$vèæ¸Jô¹([½½|.{P}ÜM§Îë•æç‘ÜÞe#†žËR[£ÃŸkm®ðÃ~[ñ«kö}£UÍÜçâí3ÄsS;3®{F0¼ó¹{yBc¦|È@©T°n½=Tá—NhuI6Íð?^öú—б/ šì{ž”àžÛ¾ßA…ÍÈ÷œîÎa‹&¯¦ûÜ3OÈfè‡Á{èµæÙÞpL³ž;{DÝyÜ»¬î÷»´œpâëÅz%rñ¬Í|pO1›ëgŽ…å“9Íö¨ná=¯Ùçîf z;ÞɶŽwosTg&œÍã-bÎõ£q z»(!ê:CÎ÷8UñC$Îã¼Öe†Áœ;Çs7}çjÛÝh­böiòv"ö;ÑÂf¨óíï¼-õí÷šèÉÙîüÎEfVOA¾6ïm¼ç¶û¸×·oq˜Öj0Ÿ=í<òo9}‡^X„ô[ï{P=G{ùø­?/eSH^éz{µ{ob;‹Ð¿z¢Š×Þ`—.m|ÖÝÞ¯/u8DJƒÝŒ-÷¼}ÆD…¦ÌãBö^ñk§GÒ¾¾É¹í½÷£é«=è>Þ8¼½«n'ˆ)Âá7}Ó}¬ùòK¼‰Íîw:û1zç€öË›¹V% çƒÝ×½Ý:ûVí+Ýèç%ž~Ã7Më ÌöƃۭÎ]·=¯¡Ùmöw³µyìß{`yÏÛ»9ùµË¶”’ÞÝÉzC:\ä–w•àÜ«øæ Õ×S²ìëïsíowtEB&rö¯#›¸¦w£z=‹yôÌÛ3¬÷^Ír›Që²àš§©Åí_?)×Ùïs+-½cP’¶kÚ}{ðF+ V ˜s0àܪ䗗o¢'ƒX®x†p$!ú/òÃ.øV è†ôjmšž÷{}é¶khÆ-Ïë”@ÕšžçéÓ'´ôâw'{0úƒ‡¦*Ñ>½ïˆ>ä÷ÙŸ_¾Ð úL ûE@ó‚}÷À󛜓J÷·ÂçßfwßoÀu–b3m¶” é'ÄzpÙîÊý;Ÿ³=òš¾¼# |7îýñ!ÀÇ߼ÝÓçvΙ!óæü†œÀ4|¸‡sôÉ€wþã–=¼|XñËóÙ(D\#vøg¶tÑ¿g‘YÀßzL{}5ô´-öÞË}=qkšòg¸æs^‹ Ì@îÛbï4û½œÏpËF÷½¯_Å:üpvnxùr~>Þò¾sG{t%“{,Ù·Þ]å¹t™îçfhk/{Ï»rŸ Båï¾Îlk'Cö^«×4;T°øêÔ¹ì¾@û<|êå_ ·Ü¹ƒNžBÞ÷¾ú_†Yœ;Çs4î[åñgX‘éÃâ¼7O·}RQæ«Ï©ƒW#7rµÇÜòvæ_׻Ŏ#QðçL”û9ÏlÜ“Ýç—½9а ï»Ë…2¼Çn{5:7瓨S‹¬òôd,¬æ¾›¼_¶± Û¹Hß>‰å ~k“ÈbÎXHÌboM;"ç£T|î{µ %¹<Øö)k.fE›T‹xô´“YÎs‘ ™æ ÃàfðÂw‹érË’LÀuO{Yg9vÀ¥áÙvûç‡íì£RÊÆÎß—Jkºd}¥åð]æE'ÙxB“ÞÏ2ƒ–âK¡¬ÖxçZœ¬|¹pÀ{^héÍáoV×Þ$Úo¾Âsݾ~Ý%⇎À}·.Q•xöp£tÏ ¡o©÷ÆeöuÊ„oÜ]}YÝÚß ïJGgag§œ.%·D:Q$žçì!7ÁÙï4Åî»À SÀúJAÓíûÛs=æ³iØûo»Íj•_=ÄÀUå¾z@Ie|l>ô雟Mòô×÷waã¤9¾Ö|tM溈 žò{4³†WÅSî˜6cÚ‰ïu`ú0ûzîöç{ ôÅO—¥´Ç¾ò=ª Žæ÷dr/{z{Ó&{w‘Ä ‡C¼ï}ˆaöu:¦ÊÇ,rÞö'™·ö™£Vý÷ˆ w†OŸ¸yáÜé@ëÃ8!ìϯm|V_"ëSÂú¿¾àD›t>¾\ÑÊ(ª|øè $àOo;‡Î,Ž}ë9%’ÓÛìt/‰=:ê}+ß1+ö{Å´fð½{gníØ‡ÝG ö¹|(öQl&òMåùü>`Wâ3×°z|øt³;Ì·ðF,Òùùó*w¼L3¦úîTÂÀN³}"ð աw½Á1u<üבöwž}›“Ô;àûg7X}¾Ò´·}.¥íöû4!sÆöyÌ8¯xyöŒMòË¢ýµ×Lc¾÷^%ä_OlyìšÂ3Ýâ”c‘aLÝݸÌñÌ3Ùsë»ð¼œ`ÂÁï ûïwŽ „³¢Í÷¹•¦ýïvÕZÕïSs–ßi/ƽﯽ\w¾m¯:{ÞÜL¸š2àváíÓÌ?¹$Šï ßO=Í×ÞÌk}UÜÌY(ð@…ánƒU¨Ö'/½¦dÜôÐ;'‹™í3ÉxköÛÞöê÷ŠYà3%ÏZäÞ§„åd|<å5´oxI‡ÝŒ{N1Ö{ÞHaÜÖ‰>æÜá¯ò[‡|ŽsžÎ ¡÷Wa‹tò÷²óÏ}Ä5Ýݓǻï2Ç»5|/ ½ìsî Oa1feL=š=Hº Y¼·Û÷¸öUNû6ÜÏv";mì}À1¿ ±õ€xµïL~ó|._Áž?| ØiˆQsëÏÑ‚y´ì^îϾøO¦†ÂY§•Îyì÷®ÄÞ*}á=¨LJÄFg¹_sæ¸{è¬A̯Þö ‡‡pÒ»` hïžÛ7<Ž)=ƒK®T¼# ›:4w½ž}4æ}³–_K„GÐúg7O w°WOŸw¼4KàH ‰õ¡¯œ÷¦x]óî,@ƒö –Ñ~_‡»u…ßmÁ˜ÝÊ2‚ǽru›ƒŒúM¿xx{áÃvý`‘)%å£XV¥#k9»ñÒ„§Ð<âvïÄøØ${c³40Pô6¢ ãq‘ƒ}¸Ûà7Ù}Žæ ™ö‹žy Eïºäú$áØFèp2|}õCÜŸ`øg؇Èy?zß¹¬÷¯ˆ×7àOÚõvóû7%Å€ßÞ»x© æ«Ír(æÏo¸ž$ÎãmÇ™ãasÛr=ÏuÏ #²n{Ô2ŠLpÛ¼JÅã»O£íD-öß&ì#80»Îh’0žW—„÷»‡ˆ™{°Ô«ç=£¯,$l#=’'èx3¸; ûZ í´7lÅ”*Úç 
a&E[ï-~¨€—æ3겕·ÍûÁn0¾C~;Ü;á¹È{wÛǽÞxê¾çéƒÇ²ˆ¢Üm¹ƒ²P¼p÷ÛËÍýgpfݛϥ˜U¡ÊŒ”¯^öœðܺwÍ|z£|CÎ@œty¦ö'žš7wÈùÃR×D½Ë{ß|Ž%ð‹€V.Öƒ÷<|yvˆ;z}ïwŒïÄ®óûÒú°†y>w(5oC¼ESsÎá@ß‹óòÜ¿\Ð&Láà1÷‚ë=°¯oÎF9ºA7&í¾|‡¯xIÞùxô½â¯—qÓ¹Ë3mîò·Îº,Às×5.èì5dÜ“<@ñÃMmWà,û;žŸ{½2ÎÓ¼s *9ÛSS½Dóïh;à=ÙîeÌBnøž[d•¯uŒê/-¹žÈW½ªG¹ž:Y¾HÀŸ‡‡T{çßu‡½>Ò¹æ?y&’Ÿlåê=¢ù¾9ìÍݘ˜¬m\¾}™£o.6™­ 'ï­8³=Ú"õ÷z9ŽUžìÚþ8<ÍpM¡Œ\ÓçWÞ.‹ÃÎ ;Ÿ•¦ŽªMõö™½L›Ù2ø'¼ñOrÚO³Áù ^8– Ø©ÔýÞoXØÇ{z5ï3ËG{Ù>ãîö0@£ñ/¹ñ?h+±ùÇžöíöèͪ?F¢÷îñø¼Á©ü'Ãr÷ºAx6a@’p0Ä‹}ê­ÑsÂáiCR|_<5â2ï§¢ß.ðï½öîÉYš0ãS˽Ɔ~ôÎÍ’LÑ야tÅæðpΚ7»O_l—³|üë´x{`¦0âˆÈ“×¼vÔGt[Š8MX€Tu×7A¹áÙz92xX<{, ç{ìÌþð»ÃwŸ§² «‡ X#Ùèg›ª×§=:röµÄôy|}¾ßMÍîKÒºsAõû[Ïzr­ß„[ÛäWVǪöT0Î:4™À¹¢Z¹M…÷µœ×½£wЯ½^p4fªjÈ;=à¶.ë‚èkÇ—f%íÍ+bœ½Ý±§åéKîÎàÐÚæÎÅÞʳO_x„½öïG  jE“NÎÅœ7|ûÝÛu!£´ù9±ùÓ·š÷/=~=ìÀû΄¶ÝÅõ›‡Ù¾÷lòæòj³ ­÷¤Ù> f­]íFå<¼G¬æÓõÛÓÁ±¾'xqû!ÎfÙ¢™Áç”ùªi~o½O;P;7`=ŸLô;êÖzÞ—{Höø—ä)íî„tgØ.¼»Ë(æsvvã½ÓÛâs6ú¾Ï<Ý…`€òSP\¸›„u î¥f®Øý„\λ'ÍAºY×ë/sí»îôϲg¦½§÷æ)|Áb¬ÉQ«)ÎoÞôå}\Mx°ý‘¬óöÅGO_~Ë$[ŽÍôÅØ­r5š¢²û7·òß,½ÚÏ4Ò>><ĘóÙá¢3Ì,KÅ«ž—ããPŽ>ï ·DÞ‰$/°áîx.}ç¸L^ø=Ñ^àxÏ£8fŠíà›ïwt^=ï"’#Žôôíö­õlŽÁa;˜“ ¾Õo§3ÒG|ñ]lh𹞳*à¨öM·Žh÷_¶ïaÁ{Å{wå‰n¾H{Ù§Â=ó÷“rfyŸ˜òútA»çŸ7³˜ˆìŸåZ:¸fÄÝ|Fv{Ïx-ή7âO[ËÉÞwÚýܶêpÜ[ï Ùéê2dw‚ðéG¬Íóììñ_?=Ú1ß;ç÷xÆ=™½Èw9“PÝ岯iy—ɹë‚b±ì¹‹” ¯ˆ? ì¾ÜYá©{;×5NžZ‡^ñì|«Ì—“8š;*ý#Ún Û´.ÜNʤY+\=ópTC¾‹yí£oJ÷ cÒâsÞ+6‹ì8¥÷g‡‘­kʼÅòž4yè«ØjÞ`]¿9â ¯ß{âB{ƒ |;·…8ø¨þøxöœžaÞRSº'b†Ñ«®ü3nîÝœw1{}ºŽo nõÉíFùÌé‹7ŠÕ©#–O}§¶úow[ván2û½ç¶ÁÙp«w8æ |?N2uÆ2Ëe’ >[²; «Û~ÞhlÀ×v }Ì}«ÒÝǺŸ,Ý¿Y‹9cc>ð L²/^¯Àû*Þ>ÁSNhÝ7Ï5ÒÆî¯º[ï¯é>³wÛÇ;ÇÂD3üó §ì¼÷½ ñ\4ú`KçôјF<ÚOFM½Ã=ØÈœÊrHþñ§Ë<=ÏÛëÃQWêAñŽK™ÁzûÄÚɗžÝ;Ë_q%l =êÎÀº¥ =GŠ}¸‡ ê1f>ÌTíéö"&hßn‡ytQØóøû³r6³±´n{|7±/]6¼ï‘:ÜÞUƒQ×5¿fc2líÑ›¬x6œãÞå^^”Om1vò¸ {–³ÚÃîpƒÒ(“”%6xLê»ÙðÔ«<élÇs{Twß ŸˆSÊOov,”B·Ÿ¥æÞß{Ú4±ÆÙëÏ:²KM›¦a`|9h=´¢ŒÞ^é4ûé³8ggšlbrÎÎ_?×™yí÷³dמžÏ/jŸ=Íp Hùà§Út?‚'Vß0dñxð!¨d‡¸P˜§¥îlrîëøáíC{ï‡ÂŽwÛä2ï.Ø7Ÿ×Þç>Ôoš>ah^À¿pÒ`\Ì$'tù÷Þ¿}ÅN­¤tû$šI¡é°=÷¼çǾàÞsGÄIPG§= §Üu'Ø2Ñ›wþÄYŽ3¹Ýëä­»ajûÛ‡¶ÛËÖù©³íÑâÖÃÇf>»ª¶`C2 A¸×Ý \5×ïyQ¨vT– žXž-ÄÞ‡Ÿp|o¹/;}Ì÷=Û´ÈI°øgC„@Ó½'9°ÓaMÐ@Ëœ'NÎO©ãlÑà6P{NÞÇÞ¦P¢¿÷ Æ(ãîîÙ Žu™×Ÿ <_·˜è}–‹ç ûÀ{ßGæžç¥3¯Ÿš}UÕö`O(0_{Úú7vvæ ‡NðÃFw[¬ 6ß Á˜ñ¬Ïº#KîÝûwtv9¦Dû܇žÕà<úŒ KÐ;ômûïb%`æÖqعÍå3š¾pmIš(ãÞC-ÌÆC Ï·Ð#Ð$=K™™Ý£ÎÃÔ‘=¸R<¤Æ<¯\d{êçÛ÷Î{7d‡ÓÞ› ,žž’vgßz© 3ò»Šo)‰E™É\ç‰5õûãðßa×¼øO„ìñÀát‡}€}xr|vÕ´IÙö@åƒ £™öJ@sovyãï¾ðö}½{rÜÝš³ Õ¹ìÈóB©yúâCß¼™Ûóñavª3H\ŽÞc×Û{ Béð¾öß{î|}QôÒžúPîÏŸ>“îý9};÷=O¬ÏL¼Ê-ï`ø|ëGµ¯`ç8ÔA9ø=ùœ>ßróÀ€7q³ ÀÃSW•a_w®!¸ôŸ}  Þ 0olöèW܃¯ÜËý-ÝÚº]ċ°¹Ø’W»·Žé÷f?`ÙÇÑî·Œµº^Ë’¾Í^ðl3‚4bÑy­Ìú{Ç~˜‰nÝÆ¶q'®Ìß½y&½¿Ñö§Ÿo–½øØ¯zIÂ!ÙÛÍei лê7Ófv(0ˆ¼4xwx9òô-"3Ú}Ýîð:EUi¡½žóCÊãóhòŒ÷½‡§VèóhYÞŸiâ3™¹C:¸îóð§öZ“Ïkbx}´ç\Âçtó«Vf‘^ÜwÇT–e—8ûÞÇâóòöðÙZû˜8 æyg®² ÷˜Á§`fZõÔÚÁ"#Û’ìyfUîÔsÏ»ïA‰º"÷™Í=\)çµï ÞðY»ÛÆÉRÇñfwÞáÎÙï[Û''¦Ý÷Äo†‚»zuJöçUî­Ò^?f¹¾ãŒuåOictù Yã»Ò{|ÊTöä|ùžàd pBŸfâO)Ô+:Î,í~ÞžƒŠ-èsÞ;!ïoãG½·yÑ(è¼4{=Š-ࡹWi&Cì‹rfæôÕÞ§Žã½é¼ðT^éÝôwØ·Ï–L‹5ôˆpÑt_½¤{ ƒÙEfœBìíÆ8ç crÇ;wÝwìÃ=+ð{òƒö=†ZRS}ƒÖ¥Çb>î]Õ€å¸5ñ÷Ö®îîï‡Öâ#–žž)6…«iïV¹ø{áÃÀo²€< Õ¾C›·Ü…÷™Ðµ“Vãs¢ŽŒ¾g}çžëÛÞÑ€çöš§ž|áņ-7Úçnó>ú^¢wkl2&pw݈]í¯Xõ¾·BY{¹Ó¨¿iÔ`V¿R¡N>‡Þ<÷%Íf¤÷Ã}—Êì Ö8=}uoÉÙrs¯8ææöaÊ$Ý÷—³w½™€ë<¯nd#86ü··ÅÜ:}'’^·qeÎY7<}9ž`å3‡oLż^zÜ×_¢çÏNKïg‡mä|O½Ft³!ÌòöÜ©osN\˜W–{NÙ¡]뻹{bv‰Œì=á_7Ždö‰–Ž~S¬ÁGGìo³ Šíj õ¯G¦îµŸdÆã)?Kg?—0—çíÉ@Ü׵йʔh_·Ë•{òæÆŒ÷µØ:‰¡u×*+%$Mì°ræ7°é³½ïE<׬ÍY¾&_:IkËÞ›µßA™ì½¾#ÐQsA÷B¤/ÝwvæÃµO9ö¼Ìß<-ÎòÏzRkÎòå½ÙŸ,™ãöÕ1Æ6ø"¼Ä«^™>ö·Þלs¼saGÞuË8weÓrÑ±{›¶'e¹÷nÝùèÜÅ÷|þZµá<ƒÄ½JõȪß`/Ù¾Þžç¾Ï-‡Ú¼ÃZì6<(d=íã=¥´)†·»ƒ1…¾óÖ{ǥũ?{´¹_{f%·;N†¼ùuØjôëÃìÜŒ#§ÛÝìÇ‚»Ùâ5E{úö»ó‰øÔ$Äÿz/e„¥;Ðð¢í„" ” ].ÙBšA8oŸwß{¹Ó;Ì !d_ÐC?=ÿ;PªŸï ?_–I‚iÒÍbÆ­VtUAb­µ1QUmŠ µºð9U5Exþ°á"R%)J'ñ_»µß'F¨>5A5FÚ. ÷‚Ȥ!Ÿ§6óÌç ¶ÎZw(Q—gë§;cÎÇ6‰¢d(»TÈ=»7˜ãç6`MôFÜÆ“*èÑ™,teRÿ=uéW• pªa6”! 
( > oš›‹v1QÉE-"oRF©¶ÞÒëuX·7uE"¬D'‰÷¶É,Y'še í1%;e˶2†I)uv ÒÊÇ…m+9Bæ ¨Ö¾È(ï“íუ%ß){ëtZ®€¢Y·’/OŠÓ»5Mr|ÔGå¨S!Ãë"±Â7øÐEZl¡‘ŒËö³]ØÙ*Œ^[±E ÇMÕc„ùÜêšÛкIÂÛ"(ÄC,øŠYµ§‰G äp’ ‹òÑë½Æ³Š¢ ònÛ¤¢j@ Z{b‚ÔbFúXë¤\$†¤;o»:ÊË]_X˜Ác6• Õ×e Qµ5“b÷¼n¯GT5ÏHKol 篻IÌGr¼XÙ”b¦"FÇ2Øà.ìÙp‚Ú°îe«Ã8êÅ$СпY ’5ŽOsªš†Hˆzö‰ôD#„„õã‚×pî KnmòØã«‚Éêô÷x‡!#Ñ–QÀ,¸Ö ‹Ô/3r£[œ;;v’¿ªÕ!;ëB¥²Z«#â0«X)†îäâh2¼Z-É1²™` 5ﶫ昺@£ªó¼À5‚ù¸ŠÃ×AKwßZ´îÛ^‚åZ…­x׬áAˆ$õ“ïjÖÛ¹¢/”ØÕU*âmÁ6„1;„Â(ý·)`ès¤%"b¾°(YKt†í+ÂÀèÁiAµ #d•|ÕTÀXô·ÆçæŸqóøÔÝ6KôÕN7±fÊî©à-6ªÈHÔÁÈœB@†µ…–!ºåü¨Tì«m-@Eš,&Âs\Onñ݈V¬RÔÓåùÚºq’ D£‘FIkPªŽokšð~™!VC¾"¨j·†$̘²çšÍ=ELa €±AFA §×4€USKd LI(ÂS4œ‰›rÎ!†Ó›]e{™­ Ù’4‡ˆf7š8REÄÈÞ ºR³ë{„›ˆaÏŸ4×+q§KqH–$‹cBnÁèä¼Ê©vg1®"Û’ÍÑëyé…. NÃGmÕÕUd$"ãÛãCº'ίYZµb_é¥ÿÛˆõ”Á>aþÎTx¼3*oâà‚=eSØãß Óâ¨"Añïxu„èw øŠÊÖaòyåÈçµUR#‹·7VÁ¢o¥×¦Ò5mëLš™6Ö« ]:9¾öÛ+ÔõîGQ#&úÐU“m'ÄŒ$/%µo]öâ÷Ü>¥·H œ{ÈÊùn›”ü­q+KbVÃõr;¤OU­Ï/&2ò.p†€nây˥嵡'Ȥ[…%iÌŠÅã¼Íä&Ö˜vçw;2MãÚ7憱ó^ €šÍÃ{Ô­î‹^ŠW¶¿mkoÞÝOÒĶûÙX8ÝÛrœ»j· º^<`‘¹ Øví<Ô¾x¢Ø4¦®º5¼«­ÙƒÔ>}ÍhWÜÙÈ Ô³$o)ÖŠ©ÈK6V£éÖñÀçvÐ’È+AèÁ“¡UšIbçͺfjx㞉ðªŠl!Ñš+šMÑ¿z{kÞéœBYáØH”V.°å((³V ñ"štìÂb6xÚà{·ŽKmj(ož(—µÎoÜE×WÌ‘iÜ£3^¢o)ÂÝ·ÜÚ³Ûì1WW·Xèa%ç±oeJÙm¸›ƒô[.!@m'¥é6‡‘™€À¾)»6¥¹r6fWŽ0{i¨I’LÝ€“—SÒœ]÷žØŽ`C¯dgŠyâÍ&ìÀRÔyk¯bb&ôÍ»ŽxÒû…ëW¦¨'¤j˼/_`ìݤfp’ãÞéÁÞT¢Qü%ú]·At¢$£c26[U ½³£{5iµmO a¦HdA=z­ñšó 8ш”0W¨c·®è qÌ2 ^‡ G¢òQAÖæ¹Å{Òªíõ²òŸéYi„ £6Å(©­2j*f×&ê¶dõ×àçÓyYí®ŽŒ•„'J*S%¯t¼zÞ!½Ùe9xÔÇ|A#G¯'½4Z/v:‹D†ƒ¢Ô³u3 ÀFëm¢H5JêÞצ=¹8ô ™ ºÓuÞQh%_M „%žöí\:¬Ý‹l†-a¨g»Q8‡H¶¾Ïv¹ë$ñÛh¡ Ÿô߇PéôÆ+‚ƮȆk=&»ºðfÛ›t‹‘f›2îd¤ÙFh¹•}Ïyß#ÍŒY15‚)0Üì˜{yeÄl„žš4δ;dj7 bPÏk¢o}©ÐÞ&îõФ´ž~6ê°wÞÙ6M]^.]žs×Dx—D°Ñ:¤”ͳ) Ya‰£¶³HFGI½\H8“ÂuægÖ"åskŽ/U½¶Q Òú7 «Íövg=ÖÎ}í×°}=Ö ÂÕ|ÝÄ·"w¶O¢uë·³w¤ðì¾›áD`#‡x²\øÃ™qÁtC“ ÅøÑ\µêÁ¨Óvµä–yåbÎè—ëÊû¸ÊTUÝÑ0=oçý‚Abˆ« bb¥˜ª‘ˆƒÀTowëgýc!ÄÃû<ýÿÑ¿ès®‡ú(}Ÿ4JòdïÉ÷v<»§ìÜ¥rDa‚$Dxb •Ç 4H”ÿ&Ñb«Qu¤%ƒ}vëm†€®€‹V­o䓸ý¿æÿ]þfÿ<Î\›×?¿³/k˜NózŒh1¦®ÛzËZẫÛÕ-§ºó¢‰­³Ú™\Ú§>›®¾a=J{MÒn’öë×ãNm1؈¥Ó ‘cL!Fˆ"Å×CϳäÝT½ÌÛC0îú@l9®Kˆ ëõ{ÂiW9v˜&"rº¦ZÞzõ¹¹¥þƒýÙ E#@ÑAE@ÓJЭ*”…â‚‘ë¾È1­—bÆÎ±ëŽLDÍÍ´d—A˜(˜¢˜±fj Pfj ) Šˆ ª+Ì }¢lÕQÔN‚C ‡$ÁQ‚°Au°…ÚÚ¢¹Ñ+œª,p%‹íE‘1–Ó0IÁ3(U ÆŠ±‡?ÃíšïÇŒÅYqÊ3¬§k ¬69 ÃR3k.Ì!?Äæœú÷ŒÀESkT ¹LÉÌÚõîëO„õ~›Á)9Äç?9&ÀC™yv3m€¾ŽÎÖêf“UüÓ]WÈ„s~ “ÕôPE­¯ T &XBHŠHÍ~2Â[å*ŠyÎña{–s½‹Óïw¾îhéùª7sj»œàBlŒŸ‚mO¾öìµ+}i¶t#ÄÕ«·|›ƒO¤LZ\Š’ç}$¸tOž LÒ\6'6Våõíå`ó.O9Æíƒ^άPÍBäÔÝ¢kÙØH6˜¾—‡bI‘¾”ñR †l“ž3ªËçœÖÄ)¬²gSu³Hkh\åžÛw¥Â ¢NÆK2&$ ÌëÃe8ÅÉv-hŸÓ3!,ÙfdH‘9ö°<>µØ»Sn[² ÙB†*‹cÓwY +€£-eID§kf›ZF‰ Pm¶îTÈÀŠé˜É™îSŽƒAhÕvË’’UÝâiå<$ãc,§…Åe7nŽëkÚÒIRõâË+ºEès”ÉPŒ›ª‘¶*€Æg[)"ë/ Dl¥sçj¯4‰ö«†DÅkR*ä¶DÐÉ"ˆiŽX7ÝÙðîjȶ†x›®õK&͹Im¹Y§£cŒ6ø5DÞ[ÅÚܯ‚ÍÙÒŸIa"â2ü6”’Û Ö»¨ŽäØ@›Ò€Çóhg[…ÝŽ€I¬#£>e…Ь=´Ï °q´ý^ENŠ$!Ö&EïŽu8š‹+Å ÕÎx7÷Þâ&xŸUòXý·F¬°›.ué.rúÆz{i$Ö¯6¤õ·AdÃl)O™®Ç ±·ìŒ„‹„[mŠ.¢E„ÐøÞ~ÝÛéâR8!0 XÖ¸²•¥·k<Èê‡Jwº÷0Ç-î9›Ô©Ÿd!nTÉ T34¼}s;ÁI˜Ògd’J¤„“9¹šÛ^w Hn­Ñ¬ÄÈ–œ™qãt)ràHÀ†gŸ«²,f*¾©!JXm¤X9¤&G%:„ Ûcülx× BP ~ˆâ„†ö¨’0Q¡~iâ_ÙáaJ!ǹ„ÞÎÖ…±• ßGbçCµŽÜç¸DòñŠR•ˆRŠFСЂР*¨)!¥¢’šX©V€ˆ© b¥hHŠF¢‡ÈÞý™|·: ú±ì€P’dîEHƒJ(¿ JPâ|ö•ñžüE÷•CÐ èA)EWÜ!BøžÃëñ°|Dúʧ°é !I@ £‚QâA) D*yç½ï Á}‰î¡R„Qà£ù€?Cä”þ€¿{”ðˆ"‰†"ƒ¿8’f —E%4•I56ÆŠi)¤¥˜)bJˆ$)¥"™‰ˆˆ”˜*ˆŠ‚¨Š¢‚f¢*"" …‚¢†JŠ$¥‚bJj$¥Š’‚d¦"`h¢’¨¨‚©¢ ðû—(úêú€½j‘¢”hJi€¤Zb ¥ ZbV‘¡¤¡¥B…( ˜(b ZhB„¡ ¥†‚‘ˆˆ‰(ŠBŠF¨”ièIæØäô î€ ;:ÇÑ*áìØ¤÷ Ò ¥"¡ˆÂžè—îÈNÉó @ „ôø|~\ús§NŸ>sŸTýuÒT’ú<ø {Àÿ¢}ïGŽþËÿ?ï_øQéDwøL\žöí4í¬¹´ÙHP”ÇޱŽ?°$*–Ð0ãd俯•{"9šÖæã›,¹Çròƒº»tñ»²Å[G3¤Y QÃ"gXb"}€ þ1mq`qvÛòpš ›^Ç´‹ô_n;y„kz ¤/^¯f”à‘‹«ÚåNM¾{°eߢ?f·ô`ínR¶'–·æØ¤mâ-šªkìz¤½«Ë×;Ñ7ïU¼^¨hôwTÑz'QšÔ=z¸mŒæ¦íÂÍo¯l5Uõáö{ßC¨l‚Ôw¯n<]Ö«ºÞë` C-Æq%ZŒlp¤ÎLÔŒù­Úôc¢W,“±»¶7{¼¢zŽ\xLÔlíÀ½ Ôïy¢¦šêœ ¶p¤«¹Ðäöø÷³¯™=Їœrí© s=*r.œ{½`,c$-Kd^ÆõëÄÈŽ02% Olú¼Ë&1†‰†%PI¦q|pl3^ѹñª®f“ºÎ<¦»Û\9n'F”r3|.^޵½ê"ÓÑE_r/y+k›Þ·jPêOH.ã=Q»,e ÷]‡Ÿ/»u’zTXÜmGŠ=“+2û¶¹œS!(SQZ¦4Ap¬oV>nhø§u‚e"£…:­dêð73XD‰µ ¬Ù›d—w6ij„Å»"p|Ó@Ù­Š#§^ðyl·Ï©Ê(oc-6¯ t{œÁC«4ÄÌBc"yÎëÖæ¦˜Y²Y2‹çrŽ Z­¤knƶIå$3@ {6'¡ÊÈ“M³ÚõmRÛÜÅp¯³eõzn¾5nT™ ·#Ä`™‰“ 
[ïEÛu—zöû¥³Ü9T®AõÅ‚U%ɘ šEšÙQÔ5¶sf*–ØZYzÆÊ·½fí"o}eñÅÁ‹®ìô7·+‡<žóõk«zÒÄ{‘À©2l‡! 29˜œR/:Ô dÓ1mÆÖò#¸‰²£•™­#Íòm£¤J;6­7]ô…i ãè¾GíîñÍ#>3L0úVÞ”Žàf4«‡ª¡i|Xô^W®­jšÖ`@!>q8”—¶5w]X· ÕÑL­JÅMU?m"#|Úl5­á^Ûº—…>V«ž4WŒ‘ë%žç;"¯7mãW‹Þ[Î@âªÐöiº¡Ã\?l± Ú£1L—VÁTãÕ®÷ÑyÕ{b;#ÀdŠkR°«œéíõpÈÌêC£Z/'¯Okî›Q›cój;Õo1 —(ÎíêuÀ¶ÑLî)¦êy#ô÷žò4…~åÞˆi¡@™«”,TÑS·“¥ö¥Q¹SW Ðôð–x¼Ûí1‰mÍ»raoc5JU¹q Äм£)—.l³UÒž ¶» ",¸¢{IaUñiÌ&ïHjósHxkwÞö¡›Ü\Ó`&n’RÑ:®Ñ¸‹ë‡i°d¥)æ.jl W¬nl8âyò2 ÄæÛ1¨"·eݹv™ìà–¦ÑçYX–¥¸P'.„ZñT„*¸7~z5IíÎÝ.õ¶ÇÍ‹b ¶ë]—Y¨\*0ζ¸ fî÷{´æ–œŠ46Æ`Ü'§ŽQ“¯3Êz¶ìÛåçӡㆿ[ÉdSÛÊÎÝí®ÏF€Ö£E -ôÆŽ<\Ûì½5&à¢ß{Þ]áN%'¹È/*žŽô~pCyìÁ9±Qeñµè‘ÝÀÖæŠë Á­=·ÆF¤½ãzé%NA¾@…릓zØýÓSmÓN÷›¢-2$à ǔ¹L<׈]fx³Æ2óíÛ›yj-Ì02“Š.kyݲòКö´ØäÖÃ’Ë.0È+0že(;^fý<íÌr4kÌ6¦¶Õv¦¶—­c7d<øá^Q@­=»U–Ðu›°Ê’'°«î”kŠ…m.–•êQW5&\8á]ÎG¯«ËkQ‰†`c#tо»îÚŒ×{Ï× ­” »˜mÒìd@é l*X.¥vUFc-‰è?V0snaIKŠ•®«¤MDé³§¡b†ûmtM*ë»&èªm‹c™SW½)zu]¬kשVÚJ«õº÷m€ŒaÎsðÎG,y¼éçì~Š€û?XŸø‡m6a#ÿ=ÙÓ£ÉÓþ~Ä`w*§üÉÔûïCW´µ4°«QhªÐŒ„ñÛøÛ1R1±T ­-ë»! üd„?·öý¿¿ïüÛüöwWvŸÝ¹¾y½÷;G=œqÈfC/^ò.ßyT{Ûj"Þ:ðÒÈÆß¥'wbh¤Is#Çö•±E°M:užwÞ:«¶Ío6úq5½©ËzÛȯ rÜ0Ør9$­¦3ó'åT!ëkÔ‘úË}¾ogZ¶lI ´]âu‹Æölïh™Ë‘§”ó´äõ7à„? zfš¢€öÉ ªil¦Ž@#ÌóÑ„Æ*‚+².1 ‚¦‰ªª¤ŠO>ݾÎa\Á ŠVîF¾ÿŽÞZ4ºÛ+a ºÝÜwÙó'ÎR‚Š(vÒ¤*K?M¹¯YnÒik„1%2Eq2~—QRG¢f¼µì•]F\ðżÖÚÔQVÝ8Ëþg ܆g» Ï;‡);rZã¦"ˆ%Z°1x’˜T†WŠDEª)Yç§óg>ùª.µj×vi>zºkía²4hr‹-emu°RI»ü/ùš†›ãíð¾ùíODD$H ï-Vù7ÜÏϾ÷ãiHüãcFjŸUs›m¯@ª†ŽÒýy_(þ·ê¤Ó(_4í X?…¾^\ÉYÈ4F?¼Ë„8›Vf&„¥ê>¶Gö«¥^† éHÙÌ—´qêÝ à(üQ<—ÆûÒ‚hA@ö~¿‡µ`ž ä {ù AKBÓMP4ÅBRÒÓB‰E ”P%)HRP”KJ¡•HéMRÒб1 Ñ@PÐPLTH¾ÏóâÄÔPTIîÕU-AM5M%ÁIHRQCATUÒÒSTM#U2DÑE4RPÔÑ,Å ÅÐQUT4URET)M%D +JDÑEµMÈP-"¨,U""‘!¤‡½ç I=ÐÜ"P” Ð4€Ð¨R”‚¥"R£H'¿_LžÑ=ƒà‹î? ô=¾=!â ¡•ø‡ˆ(„¨>aðÀðñ •…€ZJ)¦‘* *xx‚ Ùª RŠZ)bh¢ *¨ˆ* © ‰ ¨†©JHŠj!((J"–šZª™ ŠJ* ¢jI‚J&”ª¤¨’‰’–©j$¡ªª¢!fB…DPDª©!ßÌàC¤‡ˆ h)¤(”iFš‚†š(J (¤iB†‚ˆˆ’("B‚©@¤ @¤ „ ЉJP¥ J) ~¿zc y ëTø}J¾¿;ï}ã!H§3?㵑"ú³ŸLP;!918–ÚfÅI³Ö›Ç•ÈĤíŠyÑU!^¥tÝÄÉ®ý¦‰Á¹¢3W¨bƒlzv¨7˜öì™GK?H"ÛT2íL#¥ „cêX¨ZDíZ·ì^‹íž¾÷»pÔDioŠïlÚ€Sˆ´^6÷Œ«gI\Rر,5é¢y† äB,£oØ OmÜ·¡‰YE±˜ýÒÖ¼Û©ÌÇTÑ1îäG‘ÑD­ÈæN¶bU§Ž/šxÓyœNvúµÙ"+ÇS¹½ëÞ÷¶o…­kglõÒŠ¢½Ö`6(sÛšuŸ¬s¥kÒÕ®Ï$ñòöJ/>ΘÒ}ê %ÕÑNjÕ{”ºÅ¼ÑêRTqUj#kù`PËCÔ‚Ù„ÈÙ nL‰v–sæÁtbW_?l•5-€¡fv§rÓ‡â Ý9!s183R±±^n…‰Ñ›»2îB&°°…-™»+E—Ö¢']kCŽºhÇ¢Ì{.0ÊËJ°Å¸ð™‘ A¡ dÜ”G•·#/" Sy äAbpæ·5¥6¬&滫æt8ò²%‡]Õ˜è0 À3~_U!Ϧ‘N¶Õ%±<Pë£fhÕ(Bh*}u`)1ò‚%˜Ë5ŠúYC±( +˜Í? ³•åÌ;¦KFIH¯”T²d„=² &7P™uœlK©¬ØªtÞ‹ÊAÀÛè¼më\””dˆɑÁ3$¸if¢[µÉ·aùˆ8H0Ì eÙ³¨€>Ò¾ <³jP»É¡MåtÓy‡Û©¾“ÚÚí\‡÷.-e¹òqmH1[o5¹ªÒ0!ìîåH캅¢žgÆ#Òû}êJŠƒ5R·Ö2Dñ«cŽv—¾î±Õî{fOuuç¥&fÒg2™ë}±QÖ1Bb÷?=·abDé¢Eé%Þ^J WÞ›ž}9 Y¢uxqæ¼èÖ,‚[VÄ4ý®È°”Š­¹×ÓÂtô”‰”i_µ|T¼êå û4òØI5£NeŸ Â.ê—A Í™:¥~* WØ'UÕ[¡áI&Åg{•Š"ã§¹£Ló’Fhq²Ãu!Yçµ;`Ÿ4Ï w¹z{WTá5¤ÔâÔÄl§\a©¬Bí‘@óãÜñ‡y;žñS|Éìx­WÛzÀ&­:E¤ô—Ç>Ñ È=®7=ïDF÷76œ"ÝØÛ‹ª¬8°PóLPþÁ+XÚ`üÈgv©m-“«fŸœäÛí‡F…̽kŠ+¯dw*èâîê·ÏÄ4×ÔUŽšO!"è“QS „ö*½µÏZ¦¦óË®úmr¦YŒ‡ÛtPÝIXl×sXÆ´&;NoŸs;í¹Ð< 2Ïp)š(ÍN“Ë6(f´£vË-JNR ÝW¼÷Éã^o²¯&OC·ÕDy¥çá™/‚3ŠD²² ‹ºæîæ\T37v Úš46F#ôÅwT5I é!@Ëk 2LvUÔëÝçUmîÖ+ÔªPPÙv÷½×ÜŸO ZÝwseÀ*šÁùG†a‰P­›Q Djd`nT;bqêQšyÛ¢K;Gu ÞÕÈb^o.©âBTƒÉ)@ xßw¶O Ô”€ôÍP` Lb9vÌújù8«¸kš¥=¬cÄÛ{ÜpmÕLƒ8\nÛ§ TxjÑlRH¬ëS¼„¤¨! å=ê²m]¿Û;ÈcbˆtYÆc.}Ú_G¡áÂJyËÞ¼»Ý™WEMš®µWzrjšw°*WŸ„òµâh=w¸ß.Šì7†Ÿˆ?:ú¥š ¡ˆå/\q£Ó%«£ ! 
~“µAìUW%YÑ—¾šIdc4äñëêµ^‹£Êw¤…ÊäBȳÕüœ<¶;¼Šy‰O öæoS[ëî/wyö¼fyy{¨U:(çYl›3´ªh©HlœÖYªkÊÑÛë_¡=Þó0Aã8îïț÷7/À€²Q‘‡0HôìÍYÆ$ù`“PxV•u»º)ü#kfŠã¤TÚ$²{`'íÍ!êbÌK 0$†«µÛõë[5`‡™˜HÈC7¬‹¯,º÷"lFæaC"vá`©7T‡HMãQæîÀ(@3‘EŠœJ4 K>×éÂê½)Ïfå퉛PuRsDm蓲/m®lî3¼Ý¸ƒ3ÂNDú¯DÔ%ÝåÑrzó`â VÄ7[wÄUröõí÷ µiaS­<éà9O=e¦¶cÀuDÁ‚¼xh™dh5’áÓvzI¨OÜÃZ•¼¶Ú+ÖÞïJR-)ä •ÓÑo½56Ð^6é´æá#%ʉ2;)–˜U$A´â— Ž Y{CÒqˆÅjxÈ•¶ ¡ ¢vч V½ \·NÒ4f‰!Všî"EÞ’ï½V•ùÜòÞëoIilºqF`ÝYL{°¼|¾§X˜dГNZE¬-ÛpæfœgkÁxÖ•¤W+;››«íª]£pÝYÖã´`VÆa«3PŒbÛ‚‚3é4à´DªÉƒ⥭²ÓÔ£›§¯Kæï’‘»(ß·qš_o¡Gê¼íE^¯uÑšw»‘}:ÞS­Ó asîø¤==_X‚5Ž¼Ù»÷,¬0Ա܆n°ê€‘FGf¬PpÅŽ4*@Ö¬te˜í‘K†¬®±t×3)yƒXFK Mp©6±Dt*´'QŒ¾ÙôäÐÇé=wÚÓz\ô†3-¡Q(Üݵ Ì׆…Yf]Ýß®;óìÅ4´ÉÜOt{”Iû]ª{j ù²L'¤ÕíêÓÝ”ñr¾©û]~WÑà'^ú±8¼¯=Î$1Æ×¤}½U–fV ±TQÙNéÉ.晡#©*çU’$”dx5(!®JÑNéç½/œNN’ȸǪŽx±:f³}"÷_PÚò?}º/?r†î¡VÊ4nÎzèP¬"M3/h†v`7›×òìŒXwY ­DaÊQ$¶…¯fk§êù¥ï3y8fK&ÉJ‘¡ê!ÚÚȇºª¸#¼ýhot‹šô[éGž/zubÝŽ º%äÅ‹ªfñCºT æ­WÄÔ*lµö¤ìW»’4–ôÙ½¸âb÷§ÓÚ’Èg’ÜÄžMåŽRFCAµ‚ JòÝÏ1›Q¸n%Ìì›Ò3FMáhE˜KD%Q"^9Â_sq<Êû™¢¹Ð V,™;½†S½çwÞßHvˆ+µït€I;hhôÊôÒЊuŠ”àÃØeÏ=¦áa‚¼õ¥Üöà .6ŠV-‡¼Ú« »@¹1éa©ÇdW[èšÊO-;WaE©ô*+TbjZ´ª) t±·oÏ>‚fZl7qLÜÜX5G¨íQPÄwP¸©!uI:=‘MvÞ ˜&í§§=½®ëõb‡»tõ›×Z©ŒG ¬îˆòô“ÙÂ%a#‰<„ÌÖE•©²Ãµ1’¬b<(ÙlTàLÚ覉óW׫ƒ×hÀ8Ž[3Ô}!c¸…ÑËN¯¾)/ž»÷›½Ðp½žNø®J&•„„ú‡ëúýð¹ý‹ÿßø:sçÏv†‰îuøªˆÚ¢-–ŠÎtNÕ±A¡£mo~¶-†<Ú¤`#ù»a²$!$X‹BllÛ?£ü¥þ‰þt{$šúÕ´jé:ê5‚¢$„jWýš‡"3Ÿi˜¦ZÄ—)ƒO¸r§oØ›UÍnŸ²Ã™–’ÝÜÜÂSÔÂ(­Ö–ž“µx.éfOH¯´;ÉîÈ%›ÂØh¹n²”–»R:NîÖCQ-"ûäj+tÌÔþ»ˆ»‰åCfÐÐ{i¬¨C`Ú^\Ðô=›´üýþƒýgËcQ¶5ëD}‚0Œ¤, ýºï2gZ‹ü%ա榥 S¿Ì„¹=$æ«ãÁÊs#s"2CöÉÆ…i?IˆH#Ca­z>a9ÿîôøõ^pç 08šÒƬ‰»\E„‘HcŠ˜}$’Ѻ}ˆ†»h”x)–b¢Êßi\†DÚlƒ•òËÑ+@a­ØlGl•âLÆÿ8&µöÃÝùôò|òyz;;·vÉB¢â/á #¤Ué£þ~p\ž†?Å¢ûò%©y\ ÙÎaáuDqQŠ·ù³› 6ל¬~úvžœxЖkHPªN(©,=ìñ¯ÎÆ@¹[ ï)Û[=A±@v@6ÚO—M ’\¨œÜ3ñNÚ¤êE [$lâ±,I ©¾&¯£a#ër‘%"7¶vš*è z¹+€c:dcº¢Ú*% ¶.8ÝõMUF›M:sXcG0»°…dp¼²/Ö_¾èßÎâ‡!X6Š ¦½z¼å¡Ëp-¡AQ ¥R,ù%*gßoyŸt&0Rv~æ9`ïß9¿7—’vñ§ 3 ¾(\ «Æl·tÙ"Šß(j!m8I5Õ• 5"[[R²­Tl„CŽqÄG5°ÄªÊ-§ÇrÊÇB°eD` R©¶nŒbÒ><Û]»¬;P²Ü í_^ûÜ}œÀ!“©Å6£*x\)×mPš+ Ã{$c×1WK­Pj‘.îË¥ž;v¦YVY£sp aƒ‹×m%Þ7cHÃIҬݎæÃcÙç¥ëç%2©+»E»T–vËůSÈZP4dRÉì$0 HƒæØõê׿uÓâ6X›>-'¹û°„QRUòš‹H­PŠÀÃÌŽXÏŠz:vâNf0æ:IÄâ1'pDÁ–H°VÓ 5÷¥—;‡;nä!"9›c„«×Î·î ¤Î艤 ¤ÍºÝfBØé‹Šˆe‚­ÐŒ I3)el©ƒLíæô­¦"X÷,CX¦ s8ÃóÁ6ZóƒÝU ¹Óv䇞á“=ƒOôõ~N­úΤõ¢¹U’QÚ»šdÔHÙšII+ºÖÆUƒ5¸;ëº&]tj*%lDO‰È䜮×-…NÁEÊéäé»6Ûÿgý“êûaø §Öðz½xˆq>Ó$ÿzi²@òó¾ðlÑÜv@O0FODÒ¤M+EPERSUAT%-ÐMR5C4!M% TIBPÓ2P4JÕJ_w­ëÅGÐ+ª‰ªªii*‚”¤)¤¦–`i¡bF ˆ) ¨†’”˜*"‘)J¨XФ i¢š*&„¤"J¤¢¨*š*––d¥ (*E ˆ÷§¯xŒ BAa` RœCž„¢ûæ@E÷°‚½ P~¡õ!²‡¤@ö"túýo¯ÄÿÀŸöÿÂÎŽMÝWÿÛ6®UªcBU÷]Ós^Ù²eG1ˆ\›‘pSR¡Ì¬FŒØãšþ:rFÌÄ5h¤žÆÔ)ÝJÂ-ÊòD4œx0bÚþ©—(²¸ñ’©-}t‡¥e«Q‚ˆÀ£Sˆ,µÍù'B•ÊÒ³ã†ëEé]5š@jÃÞ[íMQ'ÅíVo.ò·œ»Hw…<¼­ét¤¾ÝEÝOKÏ·ÈjtÏs´böÕÍE6!ÍV´a±æTL¾ômÀ"‚2;¡´ZäµÜ Ÿn6ްDÁ6R[÷f»—SZŒµêÔl[x§?M‹¹çí§±b™(r´‹^(äÇ¥Bžô—gbª£5y® *vêMUÇs³3¨8kŠFžðrqVk¼²\ÛlVòŠ*Œ©Ç¼8¶{’‰n“‰œ»lyÙ;|ÒpjÚ¢[˜æØãˆEh±Îb‹©R…Ûã½½ž—WrZ—³È™2Íêµ#·Y“g7¤š`R°l[u0ò­!EšE2#¡…ôÊâ–¬¦‰¨ä›7`"¡† ËôÇ£HÍ5ã±h‹He¦áÖ5ÊÐØË_$†wyÍkOm*¹·–®ŠG†­Jctª ¡NÉzr`U¯j„]Tó¾5ï Û…mò¾m:׬ö ó×Sé//-#ǩڮ“›’ëkd¸ˆîýµZ[T"AL+ƒL"ä6<“hWLsÒ †ÙëÝ=j·ÛîWÝ7+8¶ïsÖhã/`ü‰5ScŠz ù»\~Û:cÓgš¾¼!¶¨³¯©Ò)k:ï[£ì&ÜÝíõé_£ï`ãï=®À”‡s=¡.@ƒ‡Û’¹E"™MÃ,œÊk²üF‹"ÝÕ­”$È…‡i˜!æ<ÉYr0¹n›uGT¸Þeͨ%Š„ &’²dÛtÕ˜FÛ>’™€›e¶G˜hÉ)± w4j©3ñw³QêÛêzj¼N¾ŠûÑÍåJ÷nŠ‚Ø$ÁÖØ.—Ì•Ú4înâJd{"ÂpW%Q–úbøÈ4 óKYgºj5ß9ÈDçWDâiìxڹܸÏ&%”Äñk©¹Ò*µMƒDˆâ¨Ž#ÈÊyWƒ’ùû¬=Õ×#“B§F¥×·@‰«#"ˆs\Þo¶Ì½ÆºÝïzê̪ӷ׻$6ªÎÖ¡¾â¬{j·™“n[‹~Cw”$BKÍ’ÒÁÇ•B˜!‚„OŸÎ130¨cÜiš®TÃŽ6ÊYò‡U®}‡ulg:ˆÚµÂîÛ·ƒ”q>ûwˆÅ¢Ú2wBÛšNäÕ“Õ Xs K›ˆãÍYM:µLÇ.Løm›´î-jàÔ¼ÐöÒû¼öß{Úú)¯Î„7³6f•6‹¨åÜøÓÝ,‘¬7ªÍÝíž39×7Ý+ž´2I0“2P˜UdÌI˜–æó„ßw·}çGvzvŠWPÏ’ ¬ØV†Eµ2nMT—ëR5±!²‹dz^3“.'›^:”b[I˜ª¸¤’è7JFž wë·Fé»7½ÙEzòezʹˆÉ¡Ä·Á‡p‹f 5B¡ÀÚW ¦Q0MµšéJ¬Ë3‚ ¸Q:˜2E„Ô 5ªï‘yïƒÝGåé{Lðbïz®Xñ ˜ 0…Pë ‘…ê¼5™›€ÌX««MÚÅ@æd„$Æ"k«™›› …îG¹›±›•ÔòÕ•:R RÙ?WhvéÌ®‡§ ²æ¤LíïSÏ ˜µn-º¦®¢s´×»Ü÷ǯOz*K훉µ`ÓIT“X5˜‡‰ß0îž›_—†Ó¾ñÇ[ο=MÄæ°‹ùU7 7.–d‘X`gpLÙhBœ0á¹ ÁŽáeÙ»7=ã¹{Z°µ]ÖΛ7ªœøº,Ûz<ÉźIB‚މdV±43fYw›´Þ`L9ÂQ…@ÊÏn»u‹éúY5À‚2Ï€PAñÌ  Î 
îøPR(›<èõmfû¼ýé3ŒQzx²ÄYnQ»E± úã?J‰Ñ˜àÖ²Y—„Õjû¬ç³£“!)ï+Þ~õsbö¿o|u“ Î;Xœ“§ R>œ—•XÈ{Õ­k³—^]-ÜWÍb«Ï2éË0ÚL:S€™Nê±Â^éúÀ˜ Bµ:¼Ð+ç‘k÷t]g¢Ù"HD#RGœ<v‡s¯Ú÷Þî6ªæ£ïˆ+>*ÆêÙE:’8IP@æ×SÝòØø­tY¨Ì¹‚÷¸¨KPýsÞzµpÆ¢v”+c®á§£RX+·ׯ ¡nèö”;žÞfø Š¢*ЬŽMVµªÈè‘a§–);ˆÒ‚ÑJ>[}Hß‚w´ºÝka¾½àÚðŒ:Ê›fP´m ‹£O&MÆq;J–ì`‰ˆ0îLJA1ƒ.—º“&ƒ]Í:ô®èOq£Û§#ËdT]4šgÍ!³VêÅ?lCr#Óöní—ehŠÒÐÖ|ʃjJ#ÁA/+0•.¹L©S„‹kÈhxólfïwTi}Å&¢µ‹T]Û±«°ª<„iî5©=¼z?5¯È=m¼KèÐ.çë½° ëÛ¯a†{ÕçDùûÌžñ\1ysóî—,·»ƒÉS½ëaeƒM¯}f¯+_—›¯Æ#4ª=ñÛé8HØØ%7ÀØÒ5u6à¿0Eió^|>•'pd%‚s4ºnJÖÐLÁSr[Òlœ¬9^­]»âùûŽÍ+ãsÁ–POXmÛ‡†ÌWuqå"B«{¤[t­”<ÔP‚nŠá€™Psù"2îpãͮԀbÄd$ˆdN9+Ù–,ÀO%ÙMÆ>×b1bv0rÌɲû=²x~èÌ"“ЩûÞw»º8œ¸ôJ‡·fjæÚe;Aáês¸d˜¾%Ö¶ŒÄ ù§¦I¯dçÏÊsSyUÞÙ< âÄngŸ¨ª|õFÛÝm±SÝÞòúßÍß$01å´ß1ð™Ø ð­UÑ_z"z³²¥¶JÙ’ÞÁÃjPU\ÄŒ30ãv«©hFŠ¥Û¡[ŽŽKÜ58o¯Ù'P›ï{’BojÔð.g@÷1›çs´Âé[¨"l¦îfažÝ½ç=ßZR #%BT¬…Ù[+pÅ ’¨k®s°ÎÌO!%aÛǧ=ßIP×¼°¤åÍÎö[Ü^[*qjŠZyÝáU8ÍFf$ xÞ|syÃ¥ÓÖÕç Ïd `ÛL0`Ÿ=ó˨± È4y!ºF;M) "0µÜY™_.=èÕ¶û¤Ö÷Òøžë=ïtÏtùF±OK}ç:½$_9'Šù¨m—ÄŒ'´¯Û©{%$I"d§# b(‚V!Mh#!I(å”¶ì»$×¶¿zvìs,ÇØÑ\[š7ëË%™7rS4Ù\v˜NF±ÀÈøfë4XòVp^ì*‹ð$bÇðt/¨‹)‡zª°Mïsšt׿ó὿ïl¬}ì×p“ngüò‰ŒR0`‡ûìøþ«?oéö‡?ßçkœ½½°ï!vüçü|ç¾-¢ÒÛÖ(6¥­UR'øß–ÁEa*(gnh¬GZªª#ûÇö”ýÏáçóš÷¼ÛÛüÕuÛ‚Û º\Kƒ¬Acz¼ø·qlÖd”Ý® nµrà£JX™Ã*ö)Þ4{âns³l›½ÛéEÉ—}ŽJš©Ï:÷}Uäv^‹À ï¬E3©Z´Žn7r»¢Ý¢à†M™ïɾëµwë8“EÌÛU²ù;Z5ë/l–§µòs BÇjæ£Nvdç¸;y©ÃfUï1üŸ¿òŸ›“+m)¶ã¬”L1ˆ‘%¶%EV1@i ¢€›Gä¨~`™0s&4U U ,yËkmVqTqd—ÍQ‚FE€P!/'!rÈÿ?g dæ­=Ôœ¦ÜqAkŠyãH\aâPNºTCƒ-ÑG• U»§ðúo02i™>]\€±’Û’Ÿ‡Õ€ôPŒGAÆéñ¹¿rX©˜«i+ÁYvY²,#+ }ÒŸ/œ‰ÎÜùs)ÌÌI[m{!iƒ5e~k•Æ’PØé%ÉD¸Ä~êôîÉØÇ—^UùýÅDy¿œÃ.ŪºÚøl7¦¢½×Ý“1¸œ5–¿¼9v±>6X±[b¥·@{²ÔdAdÏÕ|fÔå¥5œìCÎ'H¬É">v¾Ì5ÎT]ÙsNÉWÍŠé¯ãw~n¤Í¡¨Zဲq0Fs \-.3©¬Yë©…ä¦BÂZau›ˆ”H‘ž…`¸gV=¶cRX™5½&N3‰™ ¼î„Y*#˜è¼ìh¿F­"òMÅdΖ ¢[H#!# š’ÀÄ6ž ›f»ŒnU¥7·TÞ¥Ëó!ä‰D‚Mf””X™ÄÌï¹»Ù'&ÞìVæ[)Å`Þ¦öZš¤XdU²È¹’q¨HSçÄ1{‘T׸lÊi°¨k>,•ÆkHÛ9¦%QùµÌÓá|x¨5£é±žk»i©ë.ìˆ 7‹.e\¬U‘gV@Ï·(v·Í§è²ÎDeå þu l¯$…¾zª L$½†7ŒH0Æ£‚‹E:ÃxÖ0Œ:"Õ%I:hÖmE¤Vƒ¨°‡ÑùI»‚WâIWĵ³áÀu`FB!›\¬|ØØÔ íÇ:¡Œ6TÖDÕBÊ[>®r§ï¶WèL„2$“Î-~.­Ö숂]U°Ø“Š£ªI°ó‹‡¯6Ö9viµâ˜"Œ)ƒy G”LóämÃeEíý‹$Äg ¢è*Æœ°;Õ¯ç%ž{Û:¯v÷gÞ>rÆÊl™+B(ÀÞl®QçQ„„®NÍY »-'0Ë¥KLîáté•N!YÂ,  ¤ c™ VÏmÕr;i”ŒÛ&&L˜ÐecÝݘ ¡pFHKIÕ&îD¨‰)¢µÄQ9Ç!•1CÚè…÷®‘ ÑráTž’D¯èl;XfzYÆZ;K!®ôÖ|¼Î+9;ÍÕ*Xrm-øg½ÓRƒî:×w9ׯ.|”øû»ÕSã 4%R«HBÐô?UÞ H¢Š*¢Ši¥&fJ˜‰""ˆ J *‚Љi¤ ¤h‰j"(’ªš¢")¢‰ŠZB ¢¢‰hhª( ‰bh*%ðïÔûÉ=úGÜ%  5B´PP4Ð4%"Д*P…*°@ßnIçs!Û9t ³ÍÐ äEøï‹×ˆ‡¦ ªJZ ª¥i¢€hhiRˆ®wßü€UÔU5ÐÑI@UUM4U#@DQE JREIM4ÅT±´‘ñ=•Äy ˆÐ… ”(üÑúGŠ=HŸOˆ|—>#çÛ­ô棵Jœgm]Þõº†ßW%+ Õ55¯\ʪ«—µúµÏŇMß!E鈂­rkÁ{ŽêŸ¦ÛOed[©ª=Éè·MÖñ‚vl T¿9vB!À²‡‚›™°gsWÎY4¾Öl=bcFûUݫЇW°»pó…<.ÝZ‡í[:JÚÖ#˰N`‰q&4š&Í`øº®ï¢Äƒ…oµÍû([u¹l‡ R m·£õG‘3kª¤¬JÌfâ9Ù¦òE^ÚÌGš™í3®öš»ÅÜܽy \YÚ)쑊­ÓÑC-IêúZû¾v—¥¼Ÿ[Ã7«µqÚyÎ,ÚOw·Ï%gªÔ¶Ç­úmiuS»¤Ä–ô„ÃÄ#ÁZ—®ËNë.õ[4@4 `YfËRiià |œág^aÁ=ΙÏ gÐYµnF‹Ý~'´|sî²Ã÷½9ï{P{¾::o7% -™q´¥7*ºUfë›ëîíFYEEO†ª~0蓦¥Ó[}“y+RЧF-{Mo5mA›º‚⥢ÅI ud™¥ä{Ûy;°RÞÕ°%¾ÍÉ·^¡ˆrõ„ ©:uá­ŠÄ ]wi8ij9ï”îeÇÓíÌd-ú‰64uÖŸ+eùƒcÌ¿]¦A$ZÈùX¢ [cô—¼“Ÿº"![²«]Št·è Ø£íI¥öZ-§+=ÏÉíqhÌÇqJÍ2f¶-[ÀîK¥H[Ó_ºõå­ùŒ‡*oQzÊ.Ó¦¦µEfÛ%­.áÞáãR!s+Žã°e¶cWÑœ¸è›ãRg„ýÏ"]™Õ;} vD¢­ÇäÕ< bÙÓ±bÐS·6§™º-݇e£RL$ éÆ3H!BwF­ÝÁ›†c˜š=¦•™­×DyNÚД# † ©ÄvcVÍ[¥­?]ÞŠB)ÊîËÚªu׎·J™nyxSgu<-¾éÓg#ÍÙ@õÁ‚¸1Ç“íŠÕF†:_ÕÛ¶Ö­^ôN)S…—¢î»Îi­M¸¢Säzñé3á ®ˆ—)»œ'2ž`–,J&×J„ÝÞŽê»§­[W—20è –¢ÍÛ~ÓºrâuÖƒ-XTÓN0q%?Lìf]™"¶Ö;&}‚£±œ ÃBÉcÌ‘3 ¿lŸ°¹›¡ˆªU› $ÝŽØÓ´*­ÀóæÞÆ27FÛ3â$Ú™k·"ÅöÒiU*óTLòë9_)tå¯+ŽcK¹ÕÌÐ̇ì®Än¡Dšª“kÓa3.«C¿E¯¼Æ¼ë›Yéë:úݨt ê€%q€¶Y1™ŸD7sCBæX²íiXDwAjLAdz1 ON(껌$ÈÔȆæJm,"(ÍÀÚ)f}–S‘·ÈZXX%ê¦À`³\ÓHh d%C‹Þç'½ð‹TÕ!¾ÁÎN‹_‰×ÓÝ}Ò¾·-›vxWÝÐ~~*AF±®dh2ʉæ­7Vb"< Næé#`Ÿ=¥à̸1ˆjVe9*j˜Íøkš¾¾zì¥ñåÚÚg#kƒƒó=ÒJ2…|âM³ud3˜¤ì“90AŒ8î8àkpí3FÜ܈Ipåb®‰±ƒ÷yÊ^xOm%~ây8Úܱ“pì2˜ÔÚB˜ªo[bÒÔÝ¢Q.K2áÀæ‘…œº¦/ž©üDŽóqèÐÈ‚b­šÝÄÂ1£»%)êÍú¸¶iï—«KW'-]ÙÏx‹M„ƒ² úöã™Í6µûÈ=»ïÓ\ÛhE`lªl­ôîq«{µr¥‚~;ª·¯¼£=7Ôk©’Òu:Œ®J\o +äìß²#eì$TPr@Œæ’náÐ38•&e¯çI—5T˜#IÖ.Né7N]v¼Çw–çè¡ïF)U°&á¬WAW“µ‡mØ"ò‹zuz¥´Ý4Ö3æ¥u°F‘¹®hx†…‡ã˜÷ˆ9w%Æ· 
²)¬Ky±¡4lšfǺ¥“ЦÂ]Ì…MÞ(5Co1ªàšáÒj[±&ÊÜwQÔr Wk´Ï{ÒÝOžje²xàöÚké·µ¨àºARµQ¯06ôÂ’ƒi6æè[„C`Svý“³H‚#­¦ Á„³§[ªÑ’˜£‘â6å!käD•Œk;»wH¨&àß¾‚š¾Ñ~Q/5¼Ù¹€³;ÕwvbW„ûVi‹µù¯Vj5ÎãÓ»Go7®EpòªMNXt.ë“mÅ»—Ùmæ„&vÁ}²Ä-Þ*r4Ô=ƒR|¨š‘ب•Ì©¥Š‰2uvk~¾íå[”‚3¯ˆðöæf)“ òÂdfæžÍa=Û+<„:;Ì¥9íÏ wÎÃ'¼Ys={\Â’$ a ¸o·__=;xc[ɆzСÀ†LBIb±ì¾÷¤çY xèq6nòòíö0@ÙÛÕ­(7ycTZ$ÌX6ç̹²ÝÂÑBÙqÈÅm‘ºÈAå—TKÞ­§³P±g^IQgkk÷uõy5ÉcZºN^‰%¦Špöò–2õº–`.î¸;BŸDñH„˜–R ‘ˆJ‹ì'Þù¡À€þ wEÞÑNEwq#0ç5,ÁŒ˜p9eAC QJ“0Kýoõœäþ»Oök2ï ¡™þó+ð¿2X÷滋¥R¾« oܺò“0@Ðlu‚™¯P Tê¼lONÕ±*Ǩ«×ËEC¬xEo;é¶¾™ &öI¯GTCS8ÏZ—(‘Ú¶o²§Óªºõó5KŽßð¨)_ÊÓBS4~;SRDDRÄ(RÓçn™4öntƈi*ÙÐÑE!ACNÚš¼:¤Ð®%„ˆ›û.åoïjaýóæÜ…9¯9ýÿ]Ó»œ‚BkÌ˜× ÕÎF‹v#k\Œnbd2ò¦ôÛõuyom›m}*fví+?àÀöú4úÕg;4Ña•G0®òÛ Gíïw>^’c#hÎ%3l…a»a'+‡Üáº9—­Êi{Y%ÔK>”0ñšS·#p+ÄÌèIŠØ’!lœÍŒÐòðóÀ™¶l︋}ï7ÎæšˆfÍmìm© ó„ߢñVž‰N¢þ ~Ó\gmxPÇ—Ô·ÃEª?Z™xF6iµÑm²¨ lA_«ï©•Žn"’2ã-úîT¦_£[²×³Í)ÄÔ[n ÷v£wbÜA&NŒs'f R³Íq×vñ8¬éå›&ØÒ;hMg¹Õ¾1œÏ7:g¹À…—`F2°ï^2;yÍ21¬ÃÈZb.cœ½y­=$Ï×±'z㉒UPš³…ë"`‘ _[c<†‡ÏIÁ°¹qÙÍ1âØ 1$˜…al§S;‡½ÜCi#Ø$¥’tØÖ3Ìu:˜‡³eú¸ÄôÉ⃓7(Ш銎Œ¸Ï&Ð]{RvzÔb¨¯W ¥IŒ¢Kó޼ù÷’}(ç‹£-¥ m¹cÓÌ +Ö$±·¯B¯[.mÝØ¤*ØÀ‡-¶+ök>e^tAbŽYcO¸x˵–“Û9î½Ãv÷£gÂQÔ”a_|à+íµª£ƒ"q4'6eí‰M(ƒcí§NK¾×x^fÞ²¬„êÊlù‹$$ ÃlSL{{&²·ŠpëýÐÞZ@)d&1Îv§`Üð¢6—tª’nv! ¥46ÅCkˆ+hÑlÊ(³›ÃÆÈ**N"R˜¤O‡Ò¢b Iôˆù tPçÏáïù|þ0-gý·m'ý¹u¦ Èzcuì¾Qz=Ý9z÷¤=êT-θêwgŠõÈã½2Ôo‡]\\ð»Ëv"©;yž=×ÓÖGî^ßu 1œ6Xû]©Ú}âÊšôôT%|ͱxê¨"ŒÚN¨Ç­ãtË‚UÛôyòCA˜~  %£péù gÁ šPÏ·LZáytÔ…‹Î2´ª·ÑªcjëWe°HéÓÉ¥ä6ì,mî{‘Ôµ<ªd8ÄÀ› ‰§}×®9ª:ë±qE•§-éÄNriÕ=¥~[9\[¢Æ=D=Zqr™›JÄ‘† !¤!¦šN aŒ@Â^­MY[qfì<]³mÝSÕey|¼ˆ÷{ñ-^ÊÚ^~¼ž‹×ÙÉE!­'K¶zÉ£L¦ƒ:¾÷ˆ¼Kt‘øëyß-iÕõÈQé®§¿n‘]PµlP · .Ì ! t¶¡k$‘ºª¥}BVÚàœ=4A‚ mTuG–4ƒP2mxr`¡¼ÙƒjŸ@Üw4†CÁELdû\‘ªeÝGë·Q#5}«íÀÌž§§5ÜÍ© “NÁthFLû6;«7f”¤(mN­ÅdØ»Ò[¾^ÖÞv•&øõ¾Ò3Þ@Œ$#Ñw ÚòYsN‡uÍIˆbõ£IùîA®nO”ß]Ñ€ ÜžžÛÞdSÒ'OI$} ¸£ðº·Áj4ß6ëÁ{\Òë‘FŠ|wâ.œÕÕ5Y»:ØÌ…Á¬¸èÚõaÝ/Ó†°ˆÜáÌÂ6CeÚå¬S‡ Ð^Í“ƒ^„æ«>Ú4ÁL²«°#PÅD£$–Z°(X‘05ˆ½Aé¦éU HÍ8¶+Y:FíO5ÝME¨l‰ÈêSr´sY-`½q,*c¹¯êo®\—w^‚ÓÝJüx‡'yI/Ÿ¢|Û÷¥TÍôVV®ßEµ_JT´a­7dtd…ãZ–nWfî`ÍÔ(l­ùÓ-iÇ¢arÉmç=/ ¨;©ÖÎIˬ0â˜ö¹Kúk Š\جҤ´:Ó ao["|@Fß®æC›«Ó‘ê[éãÒj“Am*ë9'™¸‹B<áˆ$Û+H‰!ð܉@@QKB$Çf†d(2u¹ÀíkPv…®æ ‰ ¹X­œ—q/³ Ýr ²:q@”–¡ÕS3ƒ²‘`Ë–‘3€¸!)M˜" bÕŸb™²“H(œ£PpF°'óňìÁwuA6)V sL(`ÍÉ‚µlZôî+m®•ŠÀ·3Å0n°ÅÇÌ—n8Q¯0²²{œößmºMš)Ôµú›}ívk-¦Ü袰çvNáôÙÚÇ9ÛéÈ'›—Ó^=Bö¿m»5³vöš!{ jõ¨[²—PÉ€6ÌȚš3T¹ ÒÓôr mÔ4áÝ™²ºsZ# ÜÈDlY™»pœ[wN_‘d­kndšf¼FÕéqWnùÆÝÞÀOÅgd9í]&ÔM¦Ì—Ki!½Û€ƒ$Ç´]#I9~2Ï,öl³{ÏÞSÎs­íŽ'IH¥œµ­VJ è.’ËÜXw6ývíº[iL›¬h&,Æ-IT¡iÑ„"bûa;µi–îÛ!˜+EèG™˜)ÊQ¢ØžP\P´‰0Û¥FRk!Í?J¤D×ôð:BP†ÈsÛg·eVÓc݉6¼ä|ÇSµo£5`£.¼Í¨åuÊ$°½Bð[b¹”]×¥…Ñ‹sjX°U•bÙ8žÝ[v{¬¨¼Vùêê>íW5è3[)t ¢§MAKao7C0ÂEÜrÕ›Ž _}ð¼ïYt£§ƒë¾/TbŽÀD¬%ç#·[[€IWN%œ½xºØä­SÄ8†‚hæ7tqÈ= ¬™³n\rÙß^¯ÛÌï¹g¬ÌÕ .¥iL8Ña-êdnb¿%Z|Ž»Ê娂3›œ¡wܽ[E|{wN›1›« 8§™R•-81³» qʉwZ椬dÓI¦îÒ`î,@íÝoMìÀŒº“:Ö‡ñ—m´¶•ж¡rEŸ`_jZ‘ƒ`¬"ºÂ .Tàƒ:Ë­Bmi(†Ì\¶œ9 ÌÒÚ‘}†X3K[PˆRŽ»¶hx!ä§KÍ**Š™¯¡S¶'ÛV¡Í¤ØÅjš굫¤o%í³W9YíÓ6a¥Ð‹ZvY¢US¸ÞLy®d6Wf9;e¡_!^›CÍ7}©õöø÷‰Yæ¤~‰£DÜ·Z­ÇÃÂzÚ3¶ìç½vê0aÚ˜-¼083˜F`AÀÂz‹·f4®ŒlDž*p–6ÕºlÝ]8´­Aj ·§TeL¹—·`3q&ÖLÅ›“„L´j“-d·³&0Žlu£©ä”nUµFµ&G^× Ëôï uæÝŒ;:²ÉÖ Ad‡!”ãë>RÛN vEÄSÏF2ó§(1g0i{¸v“azÖ¡ÐëœsH4ódƒPÎå¦\35­±è¶DR\ŽkBd…Ë>Xpv$²’Î{¶ŽÎ§Ÿ†Û¾{­Rúä#lRy’rÂ,Ê6WYÜwŠFÛB;²TE5…¨#öiok!°£ÚÁy+’ŒÒ©4Í™–åĆå5+ʵi:ܹ¢4|µ8§VT^žd«[<Úó@Õ fïØ •ʼnaËD‡$¦Ê0Ò7c„ê÷œ ÔB¶^ãvMmA÷T*«^ÔLõ¾Óy_<¼½yVô6k“Û”yÝ3ÞßIƒÙq˜†ìͨ§©¯²„TUêVèï&>9–.¦öz=*[w*=¹™Ct•„Y¶Âº%ÝrbÕÉ :¬«„žbyÖ”LéÙ0Ç Àlû?h¨¿ŸÑýöæÚf_Øä°Dƒ¡ ˆ2~Ü?ÅënC…þÒC‹æ¬zÙîÌ{_r$l1 olÖ–BI»Ú™²F cæ&%ÒSGãWü|Õû¤M3àÌ øxB±&Œ fÂå\ŒÚ1vËúz½:)ñžˆzâˆPÎá ¢$"SùT4ÌfÊ7õ7¯!Í7$1b{7KtÇoëf=ëÄ‘ÛêY¡+[Í#(‹°iR[H(2Ìm–a–§“¹PŠFs âLgzCù¤{gN¯k6±·(е݀çíž×ÒgžŽuÉ¥4Jh1W䦠¦4›¡û0ÄíÑëÍ#ä'ÅÓ2Vø–ÌŠ÷`‰¸4ñ²sàZ¢ë3Ž/vîâì"ÅüÍZÅ8¦{_S¸T©Èƹ}î, E‹ldI˜‡µ8Ž]h„÷a•»c\ª¸îÞãŒp›óƲ·Û=)LÕ]ž(i˜;¹›4Ãåçk‚eå1åZŠéLØ[ÛoN¼¾;Ë4¯·iA¬.¯Ïa‡8U÷®‚šÂ…»º! 
ANl­R(]—a§Z"hc!>I!NÍEôacmÜZR(&1 gÖ\ -]ÌX=¹±µ®_g›6/t¾ÜCbV+l@Mv³V¸K°• h p¶à«êYm²€BoMûxÓ,Š3JN]ÏÉ™òºRYDÕñ87mBZºÙ[_ÍöÞ¼ïU}DHRÈ"5À{é¢;8ŽrzÔlív¹;01XKÇw‘±W¯½Ù=³§ª$š™ …´“1.š\†Ûs"­Û4ÒŒ°•! Ë nÒTۛǚ;2A4Þn•Rè–à–£k…,$\…‚•xâvã;º3ôt¼oê½ÜÓ®GÀ(D-űãÛfVN4½®Ê”5›í]Ah~W¥¹¥E†Ûº5먶Ù¬”v¯Á¸é×·NäO‚};Ô'ÏšWaÿoûÿì^õµAÔ×½Ú0Õ4[{¡SÝg1±ù¯Íв‘ËÞÜ62iÍ·q²E$Ò™¡+%Ô2Ï;7eñ6÷M®–Œ}ê£dš{FÛÝOÜüjár\CRdQµ¦×kL÷½vبlÚ£h›¹F¨„NdX&³öµÝ¥Vúú]ñgaè·jå½.$c¢‰g;Î’sŒ05VÒÀ$ #-ÊV,,”°¿[r Ý[ݪmˆé4’ÂMζíË ‘i)(à’4Ê£e…šF2î'¸»ó]ô‡0:¶2ãzÊ\ÙZ&e©XáDAú餷B¨°ÕÂH@˜ ?A5õ¼3eb–ô„°Ãv[ \‡ç«êás}ÝÕÏ®{·g³³„¶d'…´yE0ÈöiE£ÑÍç56âKg¹›6ê[cЬêË4ŠBVDJbÆã"@(ÁYHá ¶)]ŠEƼkTÙd 2&ØAf!J‘‘$†|„Æ…¡Ç‰EuʦVéZ˜m¨ ‚ÕQIñÛsij˜“©vû·Gq­–œLR5]“Vk•6¤âsfÍãt]8€ÅbØî½`çt´d([eAFÄ2÷̧W¦ï–îXƒ}¼=Ö}ºUËÝ®ÝzÑnÖ¦Ö t%­tjr&‘ïö½0DlFUKÂUGÎDlR<ાéË¥‰ÅÑQ7VÏV•øL£¯ È7é=T=}.ûrnÅÝ!uíÞëM–Ù}ªãÝ»¡B-ˆ,VÖ«H‚˜ñlI`d¬™3®¢Ár°‰¦ôZ’Ö¶É• L»=)(ˆ.²“¤Ý…LÙa¤ƒlÑJÛ]7Ÿê´èírŠB3ßÚ%תÁÅç5~¾®±hïEhNô[ÍêõÐÚ#™ƒVG* vì¢Z,ù°#ÅõA hÊØ˜ÜM´ ‘Õs•r]`̘ffÚÑ£ôÕ„!WƒnÅYZ¶,(´ Ä«¼{ÎÜRǭǯ‡7·ÄîÛhýÑ’:ˆÓ©ï…@¥s'u×¾4]d&ÖEf¡ÃÆŽFˆ{˜LF"´†ÆS1–íc*s˜v‹•ÀÊûS8*UC¯á7Ù²•FÜ85›^:ð‰1Ës.Qetf ˆ“„í8NÜ ƒ+°} mŽ)<½m¶oŸgGlÝ›‰úiÞä £P ±ºÝ˜ß–låi䪮ÍÕ¦´=÷/7êªãbÐŤÜÌ1lˆÀƒƒmbÖÊÇJÝZ–ÝÒ)üÒ‡ ",4'™0-Ì„Æá_LÊäÌ·m˜CŒ,xÍ ¼ƒ‘#^ÒõÕÝÛc}:Ò®–÷YÒ¥âMÏ<œ¬•¸tdl¼G:ÓÛµôõ¨ Ãr·¡fÄ0¥ 8ñ cP`Þ¶ü÷¼× _Å·BXCŸKåhÒf¥>hãzЇAw4ªîiUB;¸zÌ禫ÊV*‰dr#šÝéÖ‘Q0s™“B´)—Áf§dŠ™Ñm± ÚÃnË2vj;W‹Ý/†ö½½jò»YÚµ,bØ,M1¤$¦©Ék K1´NF¬Pá¸ÂMcZpL£“Ζ(k´õõ·šá•6Õe­Ârtò×›n¡šó¥'—­öÄ[«R»`BDeChÝqFÉ"j&ÍwQ†’vß{—•Ë¢;Û´ò9ï¶A÷Z>V%ŽZm Ú,ó˜¬F‹EÕs-ì…9ozt¹Ô9дkhö­jq]“Û³¶j©·;­Û½Ž•»PÔr-“®» ½º=¯»ˆù¨¦+˜&e`ÐINSGnå”0 `Ù–’J$ `á”áDßm÷|ï¹Ïn®ÝvhÌÀ¶ QÆæ¶ÝÒEÞÝpo0÷½â®÷ŠÜÓ£œe¯´µBûŸ«^£8zSÖÓ\îhïF´GS€´rjÌ;¢ÁµcMcW.¸‚(gyÉ»ÌËŒ}HÚYaHe(±r€i¶pNšÌÂǽŽX#m&„ÄÊm£8›cv7®_Eî}] ÚP¦Í»ÔtãK+7+w,ó¾ou:Ž•A“´ó]NŽ$ƒ1ìÑLí\žqÚÑc0h2RìÌeÈ% Œ¶¶PUmš{˜L ¿–­Ë£X–ƒiNÍeüô|‰8@Ö«®ì,Ú]¥LÉÍ×icA•*8•Ëa£jSFm ³|k…÷[:o7šWnb6šnZuQ:]œâ„²a}Ý\wËbã×¥c“I‘lR¿Eî¯ÓUn½ªèï?ÄPKÛr.  ˆk?ŠLMrX4µ»[šbÙ»†°ÐsCb"tÒàÌCt»›¶ñ(´ÒvöÉ™íÌ^¹ŽÜYíÈu M’„%#pb&DÚ ŒlLÆ!Lhò$ój‚Ô’Û.bí·bêÙéfÔ¯¥¨'¿n lCNU¹Li˜Ãz7gµ8§{=&y¢¥:f5™zT’ǺÀ«dš•Í‘–@"-šò•ô ¼š±¹‡¡(hAL€&+ `¤`Ù™›ˆZêo5¬La=Çç˜OÛ‡,ªÅa*’ûƒwÚ«ô¸¾Ë«’XQ¾×Õô͇œíá©3.”÷$ŠÈsm‚·½ˆfÏgMî~ñ7½•ø ºšð6 B¶'wí,½©æ+XÝn)¡©"éÌëÝ}žÝ;ÏpîðO¿ÒŒA‡ø'ü²£ý·ýOëøÞû |-çÏñÀÄŸPíÇi¾>yl© ­–ÛEl–![þå¥UQ¶QG\î×DDDD?Áû#Lþ?—ŸÈæóf¹¶s’ºaázó,âVø GK{²îÉ3*kRZ#ë–2B3CÝ:îUœ­2>ô{²TüE*Û·ÓÂé¹wÜ­=$­­}}¼]}’<ñ”¯½äý¬®î¥ZЦ7[ròòÓÆTѾ^W³ ïy*†;î´;·FrMï}ã¹Ô}¢SÒs߃ô`¿Ù‹‰…­þ–ÅŠ‰‰F¯=×!ãUDPT>À°Šª¥ 8"yˆÅUQEQpRª*ˆŠ¨ˆXH|¦·üó¹0TSë=01cB ÒÒýåÑQ¥ÑÈ‹–•Hb_ð6¤Ó8šI4]¹….ŒùwKËyqŒ!•&йDžQŠÍ*‰Ò4+bqÜç“„jœfnà†*V9eÄ® ]ÂT1+.v×(rÁWDÔ ì…dŒ]sazãdK²[<¼i; (Š:a“-Rõ,4ïL׿s¶£òÐ3ŸVæ–Æ˜ÙüäÐgoH(qÄLñÁ¦y3¥B# Zzn‚ÌVC"Ní×›C,ãué]õÇ]0}*FgñÒ‘‰Ðp"? 
º¬–ýìÝÐ@~x1Žm‹A[÷ÞýžÏ);Û1V…ô-¸žlRóÔ F".5ìº Xˆf™)y¤0o«¹«{1}³vhpÅH-Ž]Üñaéݸúܦ#ÖYÄÛ1ÙÕ®XK;§NÍ y˜´ÌbPÄê@ãóå,à…5–MvýÍó8ï3&è¤P÷»^Ü‚ƒ†Øææ3 ©QŠ`ª¨‚Ó ÊŽç—N]–©¥a‚‘æHeè D4 ²ÄºÊBãAÌt­¡xãy®} 5e±åo&&fªEJE‚¬f%(w- BFÔi²yÍ袑òAqƒþ$ðW¯7mzëÎG¶:Å#‘]—;]a„ãª@$HiÑÜ€B‘ÚÆ;‹ë¢Ü{Mw`¶=*„xy¦‹l‹•“iö‘O&õ6:¼þÛVœY­žvrèê^kçw\,a"Ë‘]ÛópÊxpÂŒç“9¥‚Î1§—øv‰MÁ©ºèEð]¤­Ž²¯ÞÝÕßk*K˜8r‹Ð—•ŽGÄÛ×pUMKdi‡R)ϵ¼³8Í Ñ‘ÏäÅO´žUrñà²ìI/µÞ=·ÃÕà‘UŽi‘)E*¥2Ñ0s0ÁiLŸL½¡³ˆ„Õ|uØÇßM&T!‚ÂoV]È\¨ÄÄWoY¹çr‘ëmjçQ4Ä àð`;ʨÒcPÔENÌz²¤R(ZBK»·a •éªpãÃ)pFIs˜bCY o/[Íöo5DDcR¸YBAffcÁ(äÍ’&—=´Š]{ÒÌÞ„[ÉáSbÔÒV[Ûl8ÚÖ§!•ME¶u½ôp[)XY_2¥Ô#Þ‹e—¹]z£þ5<·PVZ!BPÄ*4wNé!ÂÒ±½HªY½o7.òõîZïúþ»M úÌý2È~®üDIPìôõëˆv(s)Æ R¢ãiÜâ.>˜G…è%OHž8¯¨°}¢(¨=,?0&É?åþ?ØÿC“úÿkýss9Ê^+£§3‘õ­ç(,$a Vår^«[[Ví†ß UCSÊÅM|Îðl~ŒðEHË[aÜ %ð8lˆPX2àݲ…؆+º2$1l•æ=ÌÒjŸ§Œ|äa¶EÆ5qûÛ7º›õÒ'W'Nsa>^³_>ç}E–3VËXiLEgnÍ65•Õ6hôl݈Hò&ïLz??@6óõÕÉ6ºF;d¦hË`i›v(n¾eh#Õ¥7ÃS@7¥TÏW/[ÌÞçtÖ¥é®ÂªÀf¾²ÍŒ{Z)©SlÚÖÖkh–ÅS¾Šiä××=ŠÖµLîg¶ˆªÉ‹{{µ³¼•ËbD–¶ywÍí«-;”‹u‚!pF Õ;W•¹¯V¾Ú¼Oo ⛥l9ÎG- ˜æW e ;»·`¢õ†ÝÛ#TÇïYo¤÷ª[]ѦñÂ0KÍžsY¾÷Š÷µ«X…¥€é‘U)¨EŽö© ²«„úDnÅ )1é8ÚŠ®j6WâX.©´&®zÍôd<àO dŠz›»žF]§]€G,U­ÄVv<ûZ¶$Ø'ã§C±\“4‹  ¹ÆŒûÖ×…m…7`ݶÈn´•â×½$x!2Õ÷m¢‰·mô{.›wkLJ9í¶LÊÐHì¡,×#RT-ööëNÝu›TÝa«(«œl‹”Ï7£G‹!weôÔmŽn¾½¬én‹SZnR:ž¯j€"öºž·Š( Ã|8ª4ÖójeÖ½í[E9ƒfÂSOmúíd#.·I`ƒ ˜Z¶jFá°“1$XÜŽºó#Q=ekÛ$¦ –ÃVñR5¡‡%‡D=H2Ž­ æÛµ !mÝs!JS»f/‰i'¸Ú­}0$QØÞZ²GZ§©6·l»º ·n ª†ç7»!³‹µÎkbeÞrðѾ›x×b$œ@Ú¯…·®èIîp4}8\üM{¦7»ÒI#Ö¥ÛØmȺËKÓ:HçR’LØÍPp‘IïvîÊ£µM ·rÄ™ §—*ùÙ2àyR¸d¶S]øÚ©±2,Ñ4U©Ü nZÚ Ñe3-O:ªýîíKˆ»¹›Öù[[ìÔï0¥°ÓS$ÐYÇÌSYXÄÌ#në*G˜#öº­B$ÀZÆÂÓ¯`¼3]•øžr'ÃóÈnr|/_.ç·€íC=%khï€âKж=nüj0`¶Ý6঩8š¸Šù‡®A8KPG6[®ùÂwPæÛ:Ú¤2¹€‰S$HåÄv<nØ,tî™0Ç"P¢bE6ˆ›¿!31Ú.Å„i1¿4Ô`útÓÔvÎñ³Ž«×¥»»«`ØùÇ)2 <ÓíȃʞÝ=$¡@íš'Å'T®-9±4Ónë9““’ëš¡ãošßTÛ¶ð€="¨¨qm[ܨ\Ðtó‹Çµ¾¶»ŒŒj% ¼›V7„!#"Ò†M-M%в3ˆÔ•õTݯAY†ÍJ7pe™‡0nMW£‘Ö—Mõ}›éfÛÝ&œUÙêѺ fÈ€¢¶¯så£;—½°ïCN·Hayë»|uy¼^àÔ¹kGUÝ#÷ÇyC&¾xé#Õ¤ö1­õçÂwÛO˜\¡Qnœp=6¦íV¥©#ô®Öžü˜®Ór­ÀžU%ÁPKdűïkË]ËK¬yë¦öóšò8Õ+©—ÓtN°‘Yܽv‰ªbé8O 섾´¨ucDl ¢%'iÝÚk{ã¼½­•€\b¯…Ú Ô.Ï¥Ýȣܷ܇jz ÜJn}öïÎ4"DÁR‘§D5:ÊñÇ–XI=z+Æ#’õs9û|{ƺ?9sÛ Ùîï/§Ú诟»NèÝuÛd礛!38½‡´á̼ç0æ^¢[)œóìÓWµÙÏêCýëûŒÿ_×ûÄß•‡³^Ð5ž;ýfÄ')>1ÕmU¤­Š¢©Æ´glWÐrÕUhËhû?¢DUDTýÿiúßáþsù¹Ãù9c™ßæ÷ÞÞ™ž;ìË}‚è#yDÃô?9[º¾bý£`P}±UBû"󽜽dU¾–TBÜž^{†´Ñ±¹ZµÍõÙìñëÈP5Û)Arõ¾5ˆ|P•E$µD+;`rÉÉÍ šß]È^‡Lï ƒ&Õ:ˆû|­:*‡¢¼öNyÎb"94æu¢fئ65$LADhkìÚ*(Šƒ„º (ÆS®±æ~À ¢‚ˆ˜"h(-b‚€Ý…†@Šª¨ˆª"ªª „ÿÏ)œÄ´ª«[—-¾Ù<†hðøÌ\Óíå*sþqÝ~w±)bUbR×)(ÈR<$½=Ã{½™–Cá++Qz¼YkNcUÇ•Übýíyz%$IõgÄ ß¦ú{˜Ùˆ®ÂËü9OËÄÆÝȧƫ7O h™Zo],B,êËÕʬ\-€I-…yüJ<±&´í¸NúqÝß5ëÞdamÞýÑ+ßO †ÍˆièÈ*QJìü›.Ôªq%Ù5¸¨ŒÓôtÔw «kÝnæ“7OÆü>ÖÛ|A›ÜHADít«[ñfC§1Mˆ‚e¹½J–êw£b‰ä}Ú¿‹„!y/×u46!kZ£²¬”ß:©‰Dþ­õÉËåS(ýÌÊ<€z»€ñ€“†Ç‚Rõ “dS¨ ä"-×`2²2³H]ÈØï\¤*“¯c]X·1˜öAD2Á¿Ø¦åñÁ/véDÞôÈ@<ƒZ Rʨ'_$š=Æ¥PUTÆ”cñ5Ô»ÛVË "R¸†¯‡p0êÛmæ:гußêÐß¹¤¹°Ä 1xû®«­Ö¯¡µ·¹½mîý½C}øtûÞ Ñp#冻^tØfË(i¸Bj6ܬ¼ãZhÅÁ5ÕÜÎo3à¡Ðf¢‚ZuUj¨ÆÑ±¼Fª™m¶&Žr!˜˜¹ 02®Ón‘ç8¤ˆ†5e[j•bÜ…Œ©&b×cÄÆ"ï‹] L“)—™ˆ&¨ëp(³ pÂTr¬•…®Ü†¥oЧmQöãíÉ4qE+L°™Xš-ºTmd&Â{¦×e®9jTcc=aš¾¶s¬ôÊîdm²#Õœm{¯N÷vÞV«œÕ$'±ªhÇ3Z9„°.QTŠU4“l’$n3L?:‰é„øœ¢_OX!ÔNÄ=º¦@ún9$ïB¡?XC¡ó†ð"C¾¿°ûUz==Ž€z <„Cž‹Á÷ûÏ·o·à\çÆëôCé3P½àà½â¦?éÿD%ÿ4Z·þycõD¬ÿcägý]ßÏ÷Œ-‰ÐE¬N5‰Å±µ­4­÷hªÖ¢‹o³bŠ¢ˆª1Uûù7ã)…íßmq ñÝ1ºÜ(¡£—ˆ BxŠèHF: ,¤LTÔLHà½j"6~§ |0P±„® 8P°×w»cŠG™±°ý¶MÍ”Ž)ÍܘH)›»Ùî«ZZleHIºí¶©ðµE±zZx×ìc 2(`Ä 12»b—Z%ñR¢Ý—ãpšõèöè “C“`tîKx´˜ñ΢;* MÆ¢æ9 ¶é“¢…tDi’ã|Ø»·̤IœøÃgðÔ|}øL^Û~ÖŒ¶„64rç¨þ½Èãa•/êÅu¶§Tˆ„ŠIòÌixí1¨Khǹ½5«Ûºí-h»XBÕ¬æØ×Gð„ÄCø\0¶ÛŒVßì'Œˆ®‚?Ÿ 1޵ãÈkß'=’`, z?Óf¡ 1ýö `ã<¯š†€õ¡E ¾éÀ¹¾i‹GM®£e&軅 ¡› Î{Z³é&íµ}$O_јԟ‹æ6¹öÅø†{6å‰è!ó> '!°Ía"4Ý^HqTsŸMm)_Š…A› ˆiÞj•ÏMí9+Y‹ÆâEóÃveœä}[ f;ŽýmLÆò¯tÒ 5FdByT±ò}¯~{§9srkaäâ-¥$ËTpª++[®åÏ7t²nl9ÈÑßYHŽ»–äi¶›&‹‰Öb=!•¿4±Þ½ÓÖ5œ„C†´¥&.çˆ)»rÍYºg.‰îdã,¢œ¸`kD>ÓŸ/ëJ»“æ´»*ÁXá}®{o«w-q£lª† Ážå%z'£[½•;–) CšFÚŸÑ%Q,@n¾öñîŸÁ¥‚ˆÍsâKû· y©+ß ´qÃ\†DÆðWâŘ Q„1‰‹å›öWkJ§7}¶ä¼Iî±¼`ÇE.Â,ш€]X5{LÝœ}Í6¬<ÛX®›d3t¤ âì²ã]ø‚^ÕÂÄCPpBr~›íìn".@¯ôùi{Üõ&·³áy{k”ðÍU•WÖc? 
4qikQTêÙLJŽ….mÃ?,¿tä»4ýµå³ˆüHöì1 µ$¯56c;–›žn!¯Gøã»ã.<0üŸ›]j“`W,QeŒ÷nNéÁQLÇ#¥"B®Ù×Np¬+*7n[¯° ¸57§½°äåbRõ0Rè_s6¥užÍk¥ã5Û…€5Š„ßxä¹_tUGŠæÔnJçÇ.VQʯ#sc,NÉ/ã;çÉ 8û’ÂsÕ¡òÉXdÔ?éÕ,ŽutË£BôQkC!» ãtUPü[Av<­9*$bek|>_/ ^åê ~2t ô;<?:@€Á ñ×ýºHô w憒¡I$%<`aM5QÀÀ@3˽~}À  )ôg÷Eû€tUò}Â'=êä¡Áð þE ôR䇚€!CK˜¢]Pa@‰©M"á’”uCŠt…YuÀáÌC@ICT­´:$dУJ”ŒB…@b” 9¦„¡¬:¡r:T¬"à–€\Di#!AaˆBM 4Á‘΄ÂFŒš…¡¥1 À B¸t®E cÆ•(  h…\lš†… (…94KU1(R”‰‘¨SBR¸"TÄ„HÑ–1•t„€è@ A,eMd Ä- H ”% E§DL®s¡ZšMD¢À:l9iAÐ$J-"I„isKZTJb  bV• ŒÒØ" 2Àl­‚RÕ4”ˆ¨M‰)FPÄ*d™’ ÑP¬JК ÒR€&B%DM*ÒU BRR ´•¢!ÔAAT4£@”RR‚„áŠ#n)Àr©¤HÆ%€4¥f¤´ %%*Ð h¨4Dd@´­ @P”… MƒDHš"¤gHbeˆGBÕÄ‹jhs‰(@²äM"TJÄ-!@eш  QЈPP “e)©*‘6´Ò£¡¥RqJ˜‚¢C…]"4‹B+€t Ê¢&¡\J%“$RH@,”C©$HdEV@:!Ó87ý‚=Pp H"ð‘4½œ$„h(hrIRé !§˜¨G°‚LI˜Ð#N’]“,É£°¹PØ ¦ƒ*‘ PÈ!°Ð.h†Ð´a’4Ø0Kc‰GLTì%Ä–¥¤ LÄÎÁH‘.GILB8BƒT™t!€†#``ÎM4H"jBj‘ÁC˜JW¡4I  J†„¥PÁl¥²A$È% @gZNAŠD”ÕKŠ€•Ä)JX¦Š…Ä,.-„(ÎB6“D¤ laƒEÒ™ÙDÄ1‡32B@D À;({QDN <eE$ø€€*ÿ@O€’(J¤©(J²° ))  È0Œ# ² ƒ0À0  B’¤©"H’$‰ B *J’$£!K0ÔS‚‘DOâœP~úŸ°H€`€ ILªž(‡QDÂdI_`€2’+2„Ð…€ "®DÉOTi`†R4…Cƒ¹h! pA¤€Î“ Q²R(LJU €¬é¶ÒšeÐRb ĆҴ!Q¤•´#˜ØB$(£(TèrhV‘JqhÉTŠ(G$. ((¥ )„¤)ÑCÀ”Ä(eÄ!NÒR"4”†JEtP”…%4™iÒ#H£Eƒ"h Ã0h ¡¥2º"B•"†’%¤œ, ¡@ ÀhBÈÐД%©œe ¥ ††ÊiÒPR ЉB @¥!4(Pâ„æšA¥¤PÉ:Fš@( ˆQ¡j‘ @ц“èZL‰!¥iJ4Ôš¨”¢dª*JUZPbT¤R„02P• ( J ¤B(–…(…0iÀ&’•))’ª©*F$bZ &$4 HÒ”ˆÒ ” ™Ú„È¥:2°1H£ ¤)¥&R¨¤h2ì ˆ]E3M”«AT” Tƒ@ÐS’(]#Q­]°4‹HÄ!Jh@Ò´‚Ðä•ÒµH94™Z‡˜ t•B’TÒB‘"1-Pé]MJ% D)O&9`«lAŒd†´f“I‰*-©ÀºA4…¢Š “ª2J:§ éÒh0Ô›.% Pí`€˜`5˜™"¡‚"|Ñ$¸ª©•”B UdOª„‰à½£œ&šÐbÒh=U(p"R;"å@”Ò9(ÒÄ”…!a¬¶`¡ta‰`$]!@4-(]ÒØZC2È’)¡Ñ$H†Š)V‘hRÀåÒ”¹ 4-ŠÖ 1 ´L6Ñc0´A*°„(” P("U¤Q •Q(≡JSP C®…iZ)ª2€°`‰’Hˆ4LƒB”"¥* 3FJh)\Aj¡( € *!¬hɦ"tSˆ ‰#@Ò šG(šT¤B•pÀ…h¤rÄË0ÐІt… P%"Ràšd… J‘ …R6RšZQ)h£Œ1€¤DÁ¡[`$ÓJ€Q.Žª Y29µcQ°5lS”д€b %4‘44HP4 IJ”­)Bm&FÒ+@¸"IIB” %-"Q(šÃHš‚Q‰Bp´£H%¥RP&XĤ„*©Z‘¢2”Ѫ]A(¡B„)i ”9CJR]& " Šò@¡jã$S’iHCB-BªP"‘PÐP"ж Ci3!¬˜ŠU² P¡ˆ5‰d4 d]R…&ØJ1 ™ª¨PX‚”eÀètMC« ¤h‘ÃK£å°l䊪‡HPé$MP… iSS AÄ.Ê.’Õ(” a1¡rh°†SE‰” Š++±8f¤[N%% `‰B¹m$BäÑYCYÕ—T$Ë!1D4…:Ô ZX“ Q ÀHK¥Æ%Ó”–t4`Ø)Ò†¤0¸&P ‰C´)eM0› 9†$ˆatqˆ"Ä@!¡h ÁKA@ŒTƒJИˆ ] $ JP¥J1‰¬Fšj›(Lca2' É*é "¨΄(¥¥ÉA®„(Jc T¡­0%kDšA¥Ö]%%)´¹]–ƒè †‡H´ét6¡ÊiÌ[.B™vihÐÃh]\™œÌ1$F!üâö¢ˆ’* t)Œá1ˆR™ÂR P” %P‘)@QKE-QD@WIÁbªbMUDT”E1B"ê5ª¤-€ÓDAAÀ¤äДP´P!¤(ÒSIU@Ð…FØ…¡¥ªR€ T4 X2R6 H¨ ª‹mJD…+@i4 ÒDm¦bæ¡ NM)HeÒ±EP¦@:‰(HýQxw¦R- •ÉCªL®”¤ÄÒ’¤®„¥JC S+¥Â+§‘0P;©4:% S„Æ0†‚²ÌÂK ²‹HEE-(b4j&ØJ)F“"DÀ[ЪX§1Dp@M!¡ WÌ„@:ÂR‚@Ðat¨ H¥(P]9t”QICH.AÒ`AˆÐj(evLÓ¥,%šÀé) * ÂÌ%J&€Òé P¸R"P€•¨$ .SB%(Ò ¶t‘¥¤§!¤¥) DBâV…µ4ŽÐf€U5JI«96)@¥J(H•Œ!…ÄŠL`tF`дM-”Ú„‘ (piÂlPå]+HЉBgA™ lHP„’"MD­(RH`M¨É%²$H”P´m&$$)Ì(D*ÎS$ÔÒˆX€2:5,šG!Æ@Ðd* С@”Ò ašDÒ¹ „ÌHR„§ ¦±†Ò©@­etRJ˜¤…5–’”Á0l‚STÒaÙ+¡0é¤k5¤\&’© :p&”( L›Ri$’"B‚4` dH’¥4Å&E•’"ÐØB ¢ÎM#¢ÓM¤´0ˤÄ„Ñ+2“i ªÔ&щHd€ÒÆLíe2ÄJBÔPh2š Jã+‡BF ’B&JFs4 ƒBe‰‚ I@S \š Êì›X š2˜6M”Ó‚Vˆ]$Ba3JæXM+eL BéÊTæ0:Zt’PF“!QiÁ¥0UQ¤0†’–²tŽ”0CƒD‰hi1pRª¥ ¥±C‚i&—F``6ÂiÚiÁ  «-#‘4´–J,ŒIL3), )œÆ”ÆÊÒÄ;&¢Ó¶@×ÚœÉË@bH˜¨$¢B–˜#c&œJÈž¼à*½!òBЦ„ © )S+¤¦•¡Ë¥Ð„)2º”Êi³„1#j¡¦Ú4­!ƒN˜‚t…P˜GHf!§X@Ò†)Bi ´RÒ…*QJ…J»c)èÑ iZË¥¥(¤) PÊ €" é()T¥¢€¤Êè‰@ª⦑0éZ°ÒD Î´‘  0¦¡P(W ID+¥Àii(iÅ2º ªWhDÊh \‘2D!¤L:¥hh¥sPfUÂ%§5Bè  pº‚Œ¥ÖÍT£J*‘‰Ð¸ƒJPíŒiÛPæAY#l¤FI ΔF¥@¥ZC#ƒ5†–”Æ”¤"ZQ¥rº%耲ƒ„€ŠP4¸@ÐH*CJQJƒIAIHä‚€M&UˆIˆH4‹TT% a")ÔE(ÓUE ÅAB‘+@M.)f$›mE± ¡$m-’²EAE XÂå!H­*U I¨ 5C–‚ÒMJ4­ Ñ•˜‡`Bœ®…()‘5“ âG"iB•Ëh0Ä®–”Æ€ªEĤEK iZT¡Èi1¡1A H¥„N .i"AÄA*Ð"DBÌ!” Ðá`’%Ð䀤˜–+„–H&b*–GYÐBa Â2!¤L¸©1Dœ‚éVŠ0@¤«­¦“ 1bVÈè.Œ°”¥[í8ÒS€j…M  £,ŒÂ)¨•b¤% Q#9Ó´Pé´¹ Л1e Zɱ¡& ø ûH¢'xô.¢F"y%\H´#AYÒm&¡!ÒPU#VÐirQ#"hi“U:DÀ:qZL‘E+ r&Í«FÀ‘% €(`‰Ð¤RЭÉ@f  1²Ò”8@‘t¦ +@´-4%-!”1Ð`4”4«A©J”%fŒm$DÔaC@Q˜  $`Ð-šÉ1@ë‡é¡¡ÉT†’šGCRZ !@hÀ @Ƅ̺1D1lh¡€Ð´&iX4J@Ð`¦ ™MatVb!…ˆÒ+Š­+@M"bHØ0脺 )ÈÈD“F‘:Š"@ñ æ€*Ëd£ £.–$2iCA.ÔÕ ’5¨I a$ ™]•Âi 15  ÈÈë+²áД˜­ 6 ¤h Œ4Q¤ Êèp’œ­¤Ä¤!ÊÍ+ªL¦ŠC@èh6Š £E ,Ò]K…4¸ !—@”dÒÒj Õ-2`1!c@°i¤\†„+&€ SD‰ŠR$…¤Ô£ˆÊ©II´¹I 
YHYÒd4¦D‰‡AJå h% ÑE!lhÔ†—.‡#¥0΂œC …j‡HcP%€¢—Ή ©tìT„€Òè„& I´&'FRHÐM ª'T¹i¥ØL;ed ƒÌìˆ üQ:w {E0ª½¢Eê ˆQü@`ªôª÷ ”E>0€ñDõ¨e â ƒ( þáGõ>ƒý?Ï~ÍŽOáߣš¨¤5 Ë0eË11ÿ}ÿ§õ×;•¼§˜Ôøq°Ô*q­ÿªïlO¨Äqÿõ˜¢Ö6vý¸âwôç6ÝÛ˦½ÿUü÷¬4á?tе‘âœ4A¨0›tK]¹@(Åe¹¦&EÔG1]¸ZpÌ05‹—SmPè¯^¥‰èÈ>w´vKüÒï^ÍΛ£t18ñ<±,ìjÄÞìPdÌeRÙ2GáC*Uó>!ÊI±~œäß›¯„„/Ö*%O PžìÒ-{¦Ë} \Õ s Ù¤qâØóP¡™L#Z+q=1ŒJ$Ѐ¢Š–7ë38éžx \„%»[ëÇ!Ÿ1 pŒK~•ñƒþTqD"D?Bù8a£Ž"§ÿ¤±U6yÛ«mmØš¯Ñõ“šâ•E©¡Z[{nKcµC[H Þ ­·uëÿÓ¶w‹ïsG@aë³bF Žë¡¢¦/­šC–ÛVRé¢þÒ|}ðô_Fq›øÅ b~jÕ5ÁXw4ÄÌJ™·Þ²j÷³-iC&X+hC½îe´ç…1 ¶Ìnz†ràÌÐFõБ¸ìG!†œgtâÊçJ¾˜ÄlšnªdâFi+ž“(´Áºßµin‹O ƒ¿ëž¿Ž”1ADømâ(O˜%àã¶¢ö׆ÞAmÊȬ+5:&<ˆuS‡e*q©³ƒÍ+>Û›A”¶²jÅ8s†š"4Æ.ÅÏ:wCz"—•o’3 ë¶JŠÄZš³mN£WxòdÉÄÀ°†d2’Ú4üÝk…:©PbA1†®¢dÔ×uFQ2ijÍæb²q“’ ÊM$©2 ÛÛ&“Vg0x¦'oB\ÒZè“Æ’› SN)УFÿsAæÍÛé«Ñä+kD˜ \a<îÝ磱kºÍùôI“Œ]ëY·3£Ô-ÔÚ‡ Îñöh‘ô‹5–»qE¶tÒÄŒG:ÛçxXÖj²m®ÄŒ ôœ+‚hTxg Ò­ÝÑ µ¡² ¡µ|4¥^ª@̤`WÆË˜Þ 1Çó1Ó™+'Pø¥}¹Å1Kª %æ-r¬×Âl6™ƒÌàN2v,AcÑ¢š˜8°³*…¼K )'¦»¬è]x™Þ\ó–ÛÃÍ2óŸÀž=g_Mi9ºaükVÇÕtÑ[J|Sɉ®{ÿŒ„÷F#Î:ø¦¬«;zÿ£gôö…!Í-þJÛýÚØ×ã÷[fff:ÑK>›KÜ2DÅÞÑòþjþÀÀ÷*§Pù€§p ôE? €ý¨”–Ji¨¥ ÷ =º$‰éQ¿ƒîŸ ’³ˆ å‰B¹t¥ Hæ Y•õúé?¸Ïê]rÏõ¶îõ§ùLõÚóõÓûfqnöë521Kh-5œqïi?­ö# L='ć)vbÿ›®"Hñ5ùe3;ÿ^ªzµàýúké¬éEÃ;œæ©_*WüÇ^÷„… &qšN¶KÞOÓ—èøËÛ-ë:Ôlz¨¿ôi¥­ƒyôÏÄ?V´öòL$ïä•üªZÙi&/ï^šÏ—›÷ŸLªÅX¨¨(ˆÜúU1<® ¿ãbļ,O§Ç/þÜÛªÚÀ”€dGYo¨äpñÆ{{\ ÓJŠÏ4ç¼íƒ7è RÀ_–WâQo2q5ÿÙ´ØZš6·áN¿Œ§C¾œ… ¤¶0Ø,Š#ÕZœ¬A{TÆÏërð¸JbضN6ìN£œ»,ÅdË®˜x“B«Í'Þ籆Ï*%(,isß-‡†/ëŸnò_¹‚ë¢ã(ŸÇ[Ë&žÚäSOg½²klôžÓ¹®.$ÕˆÎgXõB…m³ˆ²(ÓÙ 5Ø;õZ½öHj7ý¿M¡š´X…\Ò+(…Çv‹š¾N”LM\ÄqD,ÓbÿJÓ^_¿ÔËO¿;òû DY ×{ Þ\fmS70Å-„ÓKòäãóõúmUô>‘s½E¸¥+Hº))¢Êd¨ãkú•¬bzoKàéÛ·ÒZøÄfêçþݯ=a°XéÜ’Ôns7¾îh9=pÁ<˜Èi™–àq çM’^ölrîE <”($£Myæì@¥x´¡^f‹Ie¶¡âTÜ®2Än³&?¦¦ó[)Y¥ŸñÏGw2àQˆ¯þý€ÔPÚ†â'R6 ³Š{6×í Û ÷V‘q¼‡Œ»ªzæé!uÔë]ãjcƒ k0 Tà! £ùºŒ/¥*kI…jk6¯J ÇPËq@ê,‹ª©¥¡äãÍ.'Qý’Ïß:›UŒÄÈ¢®5DödÎfŸëJ½qàƒÓ•ÄÛCmÙÂÓÝ_ò¯•áÖìMe²Ž¨+I†³ð$À-q›SM¦©ñJ"¶½lÈßÌ^³üo„…EÝÏl–ºÿ¡ÄÞjþµ'}ªð€Of•+­'¨Fmjj£VŸ»¶r'y';B­†ÊÎëÚç ŒsI®15j°2î»UÙ$’ëR¼¦@•‹ xÍ$£èÖs¾;È›hÊ’¦]tÆiŽ—ÀžkZÒëUNÀt/m(ÖãCÁ2Eý«E¦™)kUAhE• zc{·»·™-Bh¡Ã,Pv§Ì%LT¬ç­AÍÂŽ³}¿÷ò×ÞJ‡ž²A[1SëTZ›þî &Ñ€‰ 0` â™®‘áÈôµÂkvŸÇ350Xˆ'ý?Ö}×í=ëÅTü\ÇK$Ÿ„“È2ùÊç,0»L j6ËÌjšd¼„ ׎²v£²Øä>¾¯u¾—¢õÒ•²©ßá]X$Ó¥ˆ·üU˜®ñú-×4µÉÌz’­5Ǧ’5e…@‚væ÷œ #ÏdÍ¡Án!QGºZ¯?äº%XØšÿf»Ú²ðkçžæpq'ŠŒgzóq8kElJïh¸ICšé&k—kMôáìǼOãhöøS/4ÖÙ$qÑÍe>Ô(ú9µ‚³ž$^$²?åosÑ’‡h®bÄš@%ºA[™b‰q‹¥ysi§¬Ödd­y–E€³ûº{ÞVO£Ýpó[´XãZG’<ñªÚÌmb>¿ tâ$0‘ØÇ;S½õïë[ôã…ÇÌÍãŸÚ5+íÞL×°¸•R͵äç–&q•+ˆÝ°‚¨¦,üwŸ7Äm{\þûØXˆ½½Ä.ጓK£>îŽ0Ù@Ì%H `W¦Ý‚Q¡˜Ô $ÒbÏËmÕéþ¦k$ ³bÒJ ‰æ–Ÿ9‹»›tÃkhQ“ z䣋xM¡äZl©Qe«G´Ë®ì"Ñ«sdÜÍFäWgvîŸëɋº1*кZÔñ½ÝËŸS*2õêø­NÖ¯Ûá/NHûHÏ}ïYÒÙ„„95±›./2âœ`¦ˆ¹¼² ª©‚óe…Ynμ“±Æ¤I‰ìrNgSi Gp8Àéîæuör”ûà"TP¶?õD,Psõnùщ‚ú-T“óå…»°‰§¤!¨2…Fýu¯®a÷.XÍôS»}YW“hɊͨ㷠‰Ì™ryg$ÁI\‘)eŒee ¸Ã d¦ Ç 14’F®N³eÕÅ­K¥,\IB¿ÆôBÝ]íݤN4«h™÷™g.FËŕ旨.‚˜¸#X“#9íM‘U„ΫRlu-Òplvy?†XíÆN ¯ Ùª€¡1Ú¶\ßìõr­í-n©F€U¥¾}Òº>jþÔ¼%ã¹È¢¾ÔEZe.Dð’Wi!†‘ÛH¦³û ½kᄑˉ<1-Áƒ².ÞªÄtU%†f ³{w¤ØE+!£%ú­Ú»Ÿ»jÑE¢› ^óëªfbR-tMò»VûÓw§{Dƒ0hxjB(»ÏLà38“ Õì7)„ˆˆ¢­ÎS ©`ªxQ§'y™ùòÒ¬VC"Û6û•ê1ýê¢TK1 0! 
˜ó8Ž^Ÿk¥} ½6ä¢òê.k6gNÓ*45íi󂆑E‚ÁHŒ¶’¾ÝÎrK‚tÁض]KtB‚PX¡›íâÖû/#•¨ìtÝ7kä;G{x¬yÂÅLUòíåÝÎnqöK6Y`ET_qH­>ŸÛjPWWE¿F¥î«wIMgáZڔƷIªt6@xê/c·ÂžÍ^o„ãâã™P·7™ ”ˆyâ p“@­ê-ô÷Ïäe°IB&8`Q!PĈ¢ÖÄÖ ‚må¦BÛ 4þ“0¢R´è  ªHÊ@vÏ'ZÆmi-lýËêêËXrfÄ4nøÒk ØËß¶Î ×¢»·øên¯[¹“·q‚vØÔWX“wk-Ù®Šì¾b·­Ð—JDnÜf½J5Z÷#)æ…ÕÄ›iгÕÕ_UÅO²):qÿ'”{ÐÚ%ºWffÉAF‰3Nnî–òÙšq)b2¡ÿ¡4<‘'y,8 %wÌ–¯ìÍÝ¿MUX¤”Ôd- Y…È À³¼nùŒ(1C’9ÛnX7­EÚÌpçA'iFƒR1æœCå53´Yëå`k(‘`…ç_†YT]»ÔfÈ™a‘»Æðp`”U“ÖLÀ¬+D¥Ó1ø:À}JÂcóçöæ…3—0Ô‘\¿HÂu§XÛ9鮨nÆs4RÌH¾a9»`I‚†UÒ<Êvïv›`â,wDŒÀY­ÿÀ™-×H¤BcÄ'™ýþwKÞyÚÖ|MKŒæîÝ™lȼŽi9?·{¦åœïêõ^•‡P¢NÚV ¥H,ÄDXT*B¯)+ÄÎR,s™Ì)aþ „¸ÐÖ»^‹LZ]jŠy«ÝÓ!0ñp•PÑH*‰½f'V’ÿÇ·Û¨3ÎÄ™g×ã¯Wüm›:Eÿ‰ÿcÁüß—eÑ?Ø6µ1ÝPvÿðÞ¼CCý\ÿ£{¶ß–­tQJn­Ô÷›#+¨“d†)&½Ý†ÛnÅ[TWa5ìÆœ-49 «8Þ]Ò®c"î\BîöZhõÜ)þÿéït~×Mlˆ3`É# ÝI¢¼@"ÓQ1ÊLÿ.uÆ›]ÎçiŒl映J²³@̤¢°È~þOIIþ-­8~Ãö¡þ›Ç{YÒŸD Lm³+‚aþdWöÊ%Õå­´÷þ¶ÕL«Z=D2>õŠtûö…I«ÄRÏË ß&`0Î0vKšV?FƧsçæß'¤{T€tï"‚ÀUÿ[¢jÇöä¹~þ—GM#¢áû[Ÿ¿’EЭ ÷«¡’Mǻك܆î¿ìOïê­òúŇå#ßÞÌ„#qoåuRñúž¼–I¤™'Çv4}ïw 'øsㆥ9Å“ö.+°/ïósìºÆ‘RûXm¿Å¾tZ`~'æ¬kæÔ=ln pAõS Óx]`™CÓi[ÃW…„Çô%" ^®O"œYÎZ«ÌòäA ÷2÷"SQI5@ÅLP$¯Ô‘?XŸAñÒ ôW·Å²‚Û9B’ú9r2S«Âi¤ @¡/” iB¬¤:3¡(UˆQ)Ê쎈x"ˆŸ¥Dýh¦@ñPöw{|üùÓÏkOóþ}‡è—øÿÌûÏÉøÞã·æj¬š©¬bŒ–XÐÇŒTIðýÿ֯߿§»§4â8p6s ÕqŸÉðÚ>þÆ#ÌZDþgßÎó3ñû9°×|MkðoóɟͽÚ3ëÏs"žbÄùÏà –dtx TtB®r_M富vʨÆÚc}svŸkç¿ÛÞ4ãûBÊý÷0Ÿ%ãöTfÿÏ¿àôrÂ^žKÇ_Ò „ç~Üö/4¬ø’Y[ëýÂ`«¿.Á3éûÍÄIˆ öˆ_Ãü£û'åï¿H‹?ªú}ü"RœÇìü¢þoH”à‘òßãgäƒþÿ3ïÇH'ø~ÙüÁüÁˆOD2|oL÷wÛÃÃ>‡@h£lëM£‰€ŒP$pE$bOðçî•ßç}ÐþœK,ÿ7² ?P*°Pz?«¾ù½4”eÍ'šIp™õ:%gEk¡ëj3¾x¼y[7!Λ“Ћ«ÒlŠ›[·j_ WE»±å~ï]îo2Äÿ$:v-¢#ÿYüÕE9H¾ù²™«íìbé»þÑC0Iç”®z«¾ŠlU•[HðÔbœK–è¶ú-ÿÅñÏÑH™þ¡3…uX#ê,ÝpŠ®á_§ó¨o•ź_ü‡q ¬Ÿ½»†·g>çªs”îv.Ì•õ¼seç3˜êŠ8™P±Öé¨7PbÕ*k»ü{u¬åÖy 1jk¯7­v 6ŽŠÇ›*ëµ<4 Y-Y‡4™Ÿùùßt_â»çð’´û}d¯råŸï²&SS/Z¿ŸÛ|v'vûQ!ÎØN¦»i'ù®õËåçßàñÇE¤¶R:È)Sl¯œ¿ùÕþÇôUÝðÓ?¾þ•æ’¾8Ås$2 n§gl¼Ò‚h_Íy‡—>ŠfEcÊ*QQ†*z-oØwêÆ˜ Ü bÚ† ÍþNûx7Ýà sÙ½Õ8”NÅ5¸£1-Yrã¤H)6Mºõ?÷}:wýd­u3,ߪ“!  ;'‰ñ2Õck±q4°ŠÉôÕh†%²9DóV öØÅ³Г]»6[›-̲úmHÎeô×ûdã "êû®D_4“ZtIóÛ½ÌÑVïüµ¤-«ÂâlVV [)¡gjÍ%+µ4nÙ«-Ù6W±ñÊ ÀK‘fFÚ#ß¶ÿM¨û‡˜ØHtäi vZ£³Sɰÿi'MÉÊÙyVö=Ïù7ŠáºßÑ5»<¤’–$Ä9¯öæÍ™š/Kœ¯¬œ¦Þl†$Pᙚó½ÅB˜ÌŠÄ%¿ëž•§¿¬>M˦üĘÑ[[~N®§Nó±¶DÖCßÔß9L¬Õ”ÙȤ”‘!]’ë¯B™—Îv°®NÖœ¹LJOCíû¨Åyœž})$xµç„ÊßÈÌʘ€Ðh]Ú2òR†WLEÄ-a1yq˜­­…ÊfXíp}qœk*¤Ôœa÷œ{¡ BÀgŒ+ bé“4µ¦Î\Ë]£dMØÌª‘$RI¯<„ë¼êåÄ@-°{gXàñ¬éÔ³¦ód’XF m1 <Î]\u¥Qºr  nÝ*•¨ #‚¢dõÝÌ7;•¨’NiBTÃàbÎÌíæ£G©¹ˆ.÷Ô UÂGÝý0¼Ó’N¡Áøn˜o?—óþÛûô”dál~çùr/¬ZþîÓðÓç¢q¥ý~|š(×"Û o zà‰³!¨»k½Wùó¢´Å±¯ d‘cD2Dn’YU:³«n9f¹¹™™ó$1&X•èŠ ´SòÊ1ý?OÓ|˜´Á/å‹-ýû/šñ›ÑÇüä´ÿ‹ï™•;˜’©Nƒuò÷}‡è0•ýYš?ƒb\ Åwô›di¥ðž†ôhF¿I´¿ã—ùNr›Ev™vg©ùüðü¦o»î'mOëßn¸üùÝÖ 55ÖØRdv˜:[¬(‰¼ÉåXA1¯Ëu‡íÞtç8×¼‰’–÷dD4 pVŠ:ñÀÎ|Ù'{HtG&O;p¶kUÞØýfÖtºù–ä‹lMdþþø[xlµ+^j&ˆœÎg…¤ß‹Á¨ý}[Ï®ÎãæŠãï,V >Ä&$©®Öù’zîíkÞxÀØ ¨’cˆŒë11‚Ëw ‰Œû™‹ ÓLÛ%¹Õ!B0‚G8“µÍ$gá.´·tyþoú룈Âhž"ï5õô±:Ò‹ü?Dîsïºæ•@bB˯Æ\æ WœJæ·FjËB9 ‘ ®R€«³mGóE!vÒ@I/¸8çèlŠ¥Ö(›MìÏW€¨´J–pƒ3'0iÉ›t—m™3M?þîšxðÏÈP¥**!W"y°ßo=v¾×}‚d”ÉË"JÁÁ€Ö㻽êÍMA A'lhRµBQ-%1D©RÌÍçK®­kVýÛg‚LÜ,ˆíúÞSþUzš)ŒÉ¯ð ¯ž™¬z)«¬hÏÙé_{Ë­°lÕï!6(¢Æ(Èõ³ÀEÊGX‚FŠV)F"<Œ¢n* €+nêp\x5c…ªIvôt;JJGy©oK#Œµ'íx%>w§RŒnŸÉŽ'½WöM%MH"ÅQ·ÿ¥,]{\ͺ åWm/üGMý‹=7)=Bj¹¸ -} ¼½NqØp2ƒ,4Ò m(±ÌÌp,’G&f’‰Êáu×TKÉ,6S6´+.t»Ž½²Ü Svo×u®9¾ ¢Š=sVÙìÄ—=Ùd\ì Òhðå™'Hɽç39C™s¡€•‘D`!œ6æÅb¤L´â/êе½ç¸_Êïë}¬<§Î;ÎN%7¸n©¬hEX_Íië›Í}}º·`¦Ç¡ž{ O]ªµ‰9èÌL¾“8R©|õmk\…–+ë&Ǧ'-‚'ù“†½í¿åµ"ã>&lE ý#õ/å¾ÖZú·Ì1Iýþ½œãðd°oOl_f½MôýÀ{U2tõ˜1ÌÄJ?‹`z£öéȶ<Ír½Š¸Öq^ݹ ŸÄÅ¡Çö?ðþ7ÝîÕ¤oå=üŒgÇœ‡ý§ÉùǯñàÏéú3'ògÜ'±µX ¿ôý³ôŒþéÃð7$ÄÍïãÝ@ïdÍŸ|zç… G"ü1©ú]_¶z=8ùÂNˆ`vga/ a@¶FOíß}N¤p¢9Cø4Ä~Ó¨ÏîþwvwßSý?nV³›«ïÏù‹á÷ל̎e׉¿9™Ÿ¯½½;M&ýw}çý¾Ó¿ÌÇñJ "hådÒ…ùÅŠî~ò‚Ôa—_÷–ÿhÿšÌÖþù[ÝCPX?­9_ñ»2Ÿ„ÄÄîW=·wÒeELj–VrQ>;Pbt`Ô”È .?¯»¸ÄAë>–˜øíÿUê¬Ïf!Há~•ÙåÇ×ÚÇœ6~Ÿ×i¿2àL¹™øÜçëj¬œËäöúXïW,mz–Õ}…xñ—Ú'ÁbƒùÙ…wÊ^¢ÈEœ‡çüŸã¾&L#á5h§ÎdM¦ïc¨ßÑòóÎÒBòOCî`‘Œ¶—w¶V'¡¤ýßKˆO[ÄXÈ1†šÕ‘mFéD"F©lµÎˆ'ôsNÖ/H?k7JnºÌPßoë½ÛXÄí¬R«lvúT†:S@iwæÄ»yUjêÌݤî¶éü¾ÕO*+=ŸÇsú÷½,nð®š;Z⿹Ëyñ%Î[›„ÖìáV<@º±dæ¦ ²J£ÜÚˆ„¿±I{›­ 
ùj¼§Ó?„Gb55ßtÕÑaöå@/Ò#4Ø<’ÌWEþƘÿægß$tŸtÀTüÀQ #4&) ÿ˜ïXaÆ—#¨N „Äm¼¡ˆ ~&¥u¼zºZºnè&'zNIÁ-NDÍYë{˜$;‡—[šø§ÑâÊN„²s_TÃ!ÙÝUoóÚMU hóÌ™¸yÚIc‘¸´“öÙü¬¿Ñ‰ ÂfžOÅB±œ¡Íë.T0ßÎt"Mí{¿I×Ò`4Ü3„‹Í‚31ognåE[‰¨sNÿ™8\Eçç «u'õít4Ÿ •3.cß\¬Xž[¶d^^: ˜;ƒÃ4…¨jˆÞ­\ˆ¬ç8ih˜a¼Ô¼ÿ§_T×a@n–÷B×,î¡¢ás„åÚñ›Dµ¼\U`ÿÚmOu´U|½^oCºêˆNsí Ekvb_í}ëÁ{SGšÛþª¥mlmIÿŒ“ìyþöD¦¬à‡îLÖ¹·Ú¦=ÝÎSyhh«É‹Ò3[‹ªiS3Ò$¡$É©åFeê*vôªò„+ •ÞеëÚÊ5Dz+ øœœ¨Ö¢¸‰«Sÿ>ÿ\äß$'‹kkµ+”“m¬Ÿ–9ùÌèò/‰==2½"¢bé¤PiÏ&îë¦áM(¾ŒÐQ‚+;kê,☡¡jjj¨9µË‹mgõÝ™`¯–p„›H¼Úë›ü©=¨‘9x­Ã<-3D*öÏê¨D¯6âú´£Æƒdµ¨*c¯)íþ‰ÜIþOyš4'9Å5§O§VQwWjÝa[Óû–ÅÕHº¸$âg<øš×„œŸïs£ gߪ’P~›¯oÛ/y–7Yí.{ƒÝñîwaä!pÖ–£7ÔÞ:Å ¯wZ‰,ÙT‘Vš`0¾Ûî´I'ÈÌz²Ä +–'·q4CÝÇõ¼éÕE &r„Íí#µÉ9Lå €“-qX,ãa‹Eæ¸ÄZ£ÿ+ËnÑÏ[×9ëxÓI UüJõ€¿¯WÖýë ]r[wºÛ €yíÏmQ6üÓÂF« šh˜/ –«Ô…1£PeµQ?_핵1 ÑD$tŽ<£jÖe£jPj”"•A¸eúïÝëmõW„Çžh'*í×Ò#ß®C»ÉêgÞ@;{v pã84Œ2ÖRK?kÌÉwfz>ÿEòòÝjßïùR5ŒUkm[Ö^W"yÏ©…g]¶@:þF•ü4H°œÔ•HD‡eS×Ì Mš„ AˆÜÝߺ –µÀ# øÂvö7”ãR¬¼Æì[q8fÄÿ§cð Ž\›5Á*g×äl¬êÁ-í‹Díê[$“«X“ÀðÀMu<Ü3 Î=˜8Uª("$&vÕ±’ˆ*cDí¶×]¼®¹¥°b¸ZÑŠªÚƒ¦8Á–Ôüa_™Ÿ£Ä翦¸2‰ä0ÆÚ|Ãûi½Òb+†Z¹^¸€€@qkIZ;«hIX]º‘T„ú–«KZn¦)н˜ÅM¦|DŠòòuó•QÖå‚`› %ÃyáAqýznÿó÷t˜SG­h¶Ù3%ÕCWHÜåX[Ä:u{Õ¯uJ.è—ǯs³«™K¤UÑ(3wßÊIÖtõUÝ,4‚§S“ØMWþó¾g¶Ô㻕t~þîÙtù~½;@éõ¦O]vk¾7UBV>ž¸.tñŒÎ#*2‹…ºª0È´6mÚv¦$Ÿh,ØV~;úî(µ)‘Ê.[ÅH*¤lX¬üóÄÍä1 g ¬Ž`QWgÉê¶*Rv«Ä@BI²î†"†%‚ÖXÛ+&9sûaM¾¥UsÍ1­n0Æ õj,R&ç‰à¾ÓµØ&$n—b¥ÝB[PÇ¿“ޣȳŒ9ÚœäSæÎõ~ý²WŸ*”ã`Ó™I›»Ž*?]Ö€@Ì“¡Åžós’*àt9¦ ØÚv[hR»BÈ“Z63k“üëAyk@§¹o/õ¯}¨lEµD{33VŸÕsº£ð·?¯1OtÇëOß%ãˆùkÝ·;÷$ÈéÑC6U½h°+!Jej£RÓ3 ÊqU'¥Î@ h¼NåÓŠ'&”ê¿ÉRLXÌÆ^~ÿ0ú+Ž!wM®M‚«±êš§úwiù³1p¨™ípÅç»Ï‚šú'U^y¤É­–JbÇaÎá€$ ±4fa‚G]í}Ñ‘i:ÊØ«Rm‘æÕR C^Z zó:ê{kCZ䀩5«MÓbÔ-—m°Ø(_îr£1r$®JîÖþÛ³`q(8—ºdÇs%Ñ“vÝ×w™a®Á‹W `ØÉm„ŒmH[!ÝZ4ú)|=¬¥œÚžúÙÇYâníÕ,k€ •چțÙZó{NÑÏ–/X©·I¯ [‘h­.ìd4žÃm…g2ñéºÇODèžtᄼÞåzët»¦n‰š·ú‹ŽEÙ}í¨ @OدìSÔêBůte°}³ùcò|éÍaßÙŸíïëÁüÝŒ÷ðõ·ãW¢[þâø"ùƒP˜¾‡ÓäÏ÷nÙýÕöÅ¡/0éÀ¿êLá…5ߣ½Pßçúo}ïÅýOË\þ|ÝpÁ;ÒËòˆ¼ÚÄÚò¸VÖÀ…ÅÊ0ÃeAbÄ‘Ïéõ’™èõ…naŒÚ˜t»ÀÑ‚!üO²¢õ_ ’Wö *h ¢ –+ôèŠô¹A|?hŠ"zÁ@ê'òUN¨ŸŒCÖ¡ìPö)"A?÷J´Só¢x'±CÖ Cé}Ol0È“©„¦(¯!N0H qJ2%Ð H÷®^IÉŽ É®IC‰hi(Ë)ÉL#È]ÎAÒtè)îÿ'킟±¹œ-â$ãe*[*ªpÃ*ÿke#Õ^nn{ü›Ð XÿoÝ;“>ô¡;[m¥µ©9@„Û`Iz×¢þå¼§ý¯¥sÕ^þmXˆŠLâûVü·L‰W19çqÓù+üò÷ßnjo>S£Ø¤Åþ®zcܤmhœsðòØ>58†l̈p¸ÖOøHàýÚ8‰ üBÛe²Ø”ŸÚîî®Ò7†›ùοUÇCïÕ̉yaXU:²}ýk)ÝÛE®©5=/•׎¢O^ŠY¦:×9m5YF-h™–tÀ-jÿ[+ÒyY‘5‹„­6Tc,yMMý=«Ü·4›[6ÔHž¬Ó'X“ [Ýy»‘$®¬…Óý®yÆM>Þ§Ö’7x›a×XFi±üæ©Ì’p@8‘ŠÎë3]¡? ”ïo¹§-„¢;¿]Ö._Ÿy×ÒãYÐ×kR6#Ιîy¨MÔH"gü¹«á^_S$L ¤e¶¢§#À”P­’­ö.ÿu {£O û ã»3þÙPlX;F #´c8ÀÔ'ÖLgÄÖCƒÆcE ,b•¥9¾x©\·žÙ ¶Á\šØBM1uÛd¶ÖÕ#xšåâ!ìüî@J|NˆÛ±ä@¿áÅ'5Kg‡Nar½g8pén¢麸1%»Û°Åœé)ĵuéÃã\ÔtoÇÉ´hÓ½¶Ë1£:>¤C],žê÷»S­‡¶Í±1„È‚®s’bLÊ%œùtn=éÛ¤qYH–7Üîµ9š¸sÔ»$(¨öä 5 ý߆Ӣ Ad˜«á|ï,P#%À? òè«'À€dYµ–†qsVüèÁa£+È'ÎwEU°ª[dÜ‘=×Î ŠuƒþÎj0÷½Þ÷»zÑx’§JeŠ[ólu‹¢íÜÌ~Þ ë|"õbM&qÏMÙ1$–ídðƱê¶S,Ǹ®ÚÓŽä¯ÖS槳ߛuè¯æõœ¥uŒòèv1'CÜÝ‘°½¹x›Ó¿ÑN>•¸‡¾»ÄjóØÐB¨Ð †â̱9kæ»Z¼ëãjÓS¬Û34R®D‚ãR ÖáEÑšõõik®DòÙF.Ö’bs®¼9³ªá·rQŒb±V*„kä.ÖöÃdÅU¦ó>¹öðŸ§C”Ï mE±¹qèçþqúô¥Dx¸[V½J¥C*þYòcPÔêþÑ}¸ÖΪΩX|ä×Ãè¥yF‘¹œ„ºÿÖו^Y'> y±—÷È€P+áÿ2Ë{!×)Ešþñi²mÏÄÏ’\ãOÛsü‹ë¼ú-øÚm×.Ôc¶Yn¹ôå@¯s«üÝ·øoÝî¢ÙåþG(ANˆŒþÀžæ¹¶D‹ú¸+&»n[(SŽQ:™v¯çY¤å?¶§jzŠÓâ‘@«&I1t~S¹¢½.«±šåË—ð»X¿¼hí[?Ží»4,`(¤ëegFÜÚ?‡Aû¶£/Zùý{ªtqÃj„" ÆÌ† sih¢®¢¼ïöûÓÏô÷®†J¯–†°ÌÌEKņÍuµNÝþ¶oµ£n9ëÖ@´Ó’#aRaåÛ"'/Î! ɃÐ+6Ÿ÷uì?ù©šÖ½R½¬qàîliíÛié¦kltjï71,Å[÷K7Î çº5ÀzÜÝ÷y§¥òJ´<Э:u ¡L^ëÞÔ½¬=lœÐ—ðÄ’É3Mä*)=wu¼ $a·´N![âÀ©së Ö•ÆN&fCŒb /‚R¯C3"ë(>xœþ>0E±}MíßóÚÿP¦f1`b^OÎ9bé´·Rx¡§oE‰(!IµbTÍ ù»­9q”1.¢›ÍÇŠ^59çÜæàØŸÛç 8?¼Z{”[¯Hf1%?fâmÕ}qª-O¥lùzÁ·æ³›±¯Nt1Ù¬zOúY»:KœÔÝ"µOóeÆh±’%ƒzÉÓMî½U„„¥0;Rwl4ì=ÆÆ¸$RÝ‹4¤œiéÙ³DÄ‘ÊWûÕ&R%d:•˜Ü³2YîÐ \»3“õ«HW™¼óy¤¹ñýo¤í€Z,c"ÆyøˆY*ª‘&³iåʽ„’‘Súïh¿»{öUY’V€ùÚ’yphB¢Ø¬<)¬Ý‡û ê~yÖJÚmKrIÕ(#<ìÙ&Šp˜oðNs§TÕâñ¦ÿa鸤œ‘Ìn’ß‹UÛés^Œa±Eü;ÚH·¤¯3·¯s4”þ¦{|½ªÑ(,@iÃÚòUéÏç¾Õ ãJ"ºëˆ­6+0óÑ:ÙŒÃ_ó¬“l…‚x„A-!@µhCh3ß=æ¼.SLŠ˜Ó=êk7‘1šZƒb‡Ô»†1"—Ìp Àèˆ= F”)I)U(©"D ©þ* ƒÌé%öII¶Ô@¸‚ Ã¥DQ2! 
ÙCI‘Ò!•éç (LhÒšÂh3è£E€¥Ð•þòïøüúF¥o๙õÍüÓ‰Îç^§S(ts)-ý·{Ëæª[ç"ÌödOío­bN;£ûw>ð¬zÔO´®ô¿¿ØŸÀ~lñeMìL—~•íÜ`?¶o%\ÌÔ){‡Î”=ld“†{XB‘†EŽ®€”E­øÛ»¶…ÂíÂâêˆ(0A„Y°¸X94ʲ¦e®!†%—L,VÖ.™gna°­—Z0q´d 9@¦-Q&³Œã9j­N%0°Á˸c›†mÔU†°Ïõtãÿ+‘'?wnJ*0–ÊQ$H¶•%Qñp`°¬-¦L³p‹©S ¥d¬±5™ÙÕîþó¿¡¡ç‹ô•…‰;[ÞJaÔ;l„×ã+9“P¾ëµ¡³J ‡tu9w°I›ô<äšf2²KNgÝÄT ÞŽØmúbÇÕ¤–«¦ ŽF»½í¼çPŒåh„˜¼@04^¢lÝ„¾™Ç 4S½rH¬Í*J )¡fܱÝ"±MNÿÔ¢õéÀk¹ŽDÚh®ë^¹3MqFäœk»eh’ì–SVÀ_÷Û¦s\2‡'ÇkΟaŽBMæ "D ¨Éy›DNZÔ‡j£˜æ.ˆÇEÇooXëeš(¡ËWL»ÎųEX—ªªCç5LœÒbJ—^b¦1´6§£=çÝwÄyãÀFhÌL˜¬·ÖO.]‡t¶_Ù¿k"JFéfU•©×·Ç5¾§‡ÒzGyig/Uš½w” )+LÀFáÿ³PdU:œqбnrÝ]Ñ«N’uC¤(¤^²ôÐWvÝÖÎ,éÒÕ¥¬Úô{¶NÞ1îÞ7§Æe/øBØÃ;¹À‚Ö,ŒTçÒOö*iÁO>Ýž4¶rø\’C_֩לˌÍÚon$ŸÅÊú$âÕ.ÔŸòËûSèôÞéa´öS*|££VÈýßLê ááŸÎå8Lkšs“úÓLaF–Ž ‘œÆ„EÖï^]] p’ôWZmršE£6¼šUuÆ=:0ÎMå)ÛØô¬†Æ®È»™Ö.²»rÂä†ãÅ5r|v¹Óœùµv(5)¡ÑøÌÕWWŸyû`§Ì×]Ld­""7©Àƈ1ˆ ^4¿©¥ñ6q‘T© …×\˜ùò?îæÆÌ;[+dAâæcIVü`´ªF6V†ÊW!–[Æ€M•×?®5ÞQä4Ê*Ƙ”Ø—Rh%Á›n¾çB ‡”ÉG˜Û$²ä~ü袃ÌjJ¦6 t*• m Ô)õ—­d*ÑÎÜÜÜyAì”ôP€D ,3ùTÄŠ>Nƒæ=Þp ¦t‘&ÐäBA¦2¬à«¼xœƒ[4¥¬š¡ÚB“ r"¡æÔõD•ê1ê©}[DX~lÃYú{C0ýðÅ‹$Ë,‹>!}`gÍ㪭UV'Ë51ˆñ9Ìâ_é é†gŠ:å1掌”5¹²&&ða”9a$Ë1ÑSŒŠßûhjfœqek Ô8É8Ê“Y&¬”LÎl4Ù¦àW;™y‰)ÒÅ'ørÛ~»ñØüÌ1ù”™‘˜(ö¾bžUBµcÿ/ŠºmJ ýÝkkÏ×ÐNÒœ¶4ùñ#GåùË nPøü‡W¶cþÎyâád÷¤1Ʋ˜—Ûß{ÝD£N+?ï;@ííÀ¬Æ;o,©|ÐÄêuÞ°Ôw›ÞeîCâyåôE[²ÖŪ0AÁÍh¨Q¦w[Q&Eá;lÊ|zâô¾h•{\Ôñv7ŠCm8 †¼MÖ¦¥8“‰©SÈsÔfy±ú’'ä­4ð þ^ ¿çö¯-FÏ/UºôÌœœ1˜ 4¥_z@^ƒüªhUÿÈîWp(ähþ–»H'b) Z¥ˆÒoÛ¦«Æ"Ëøm»x‘ö¦ˆ t®Rc/I4T x#£@¦£—~lôÊ|DUè—7¡vwÎr•(WÛLLä8Ï^ö^í©–Ì%½dÖ–$6áïvê $¸·˜BÎ4õ™ƒCŒå§S¹“6ÂÝÅñ¥…¶9I[ÎgSI½ÔÂ.ºWz¯w§Œ1êj`Ûá%³ÏNÕvˆPAL :d 5Š‹P’ÝÛxhÌGÜ–àí‰]ÍÈ¥O4geÕ2—ÞPҷƤ06yÀöÈó¡è™Ôòºa8— YtˆJ¸±"¡)ª¯íe-¤ã»b±ªfA@4Äêy™r-×* ÆcϘk<÷içÌ(ݨ‘xÖW¹æTýY¬2¶ùA¹{²x´ã ËDܹÛ~{ j+…©Æµ‚5‘ÏÊ‘¨,iŒ{—3fªém‘­ “æ K»7˜“ûÛS£õU/쪉Qæ·é"áµò™ô‰È¾­iã9©ó›zeTL^m#'zeÿÈOçÓ3ê¡R  9´Á9ózU?§ˆúù'c:b]éá1ÖW8ZÀ꤇SŒ q¹aN^«8»¯ÍÔü2òÉK×ÚRÕ.%‡~Ñu&›C•†vêbâ ™®* °UZõ‡±)»yV´•­(Úµ©÷•x\.%y•ڿ°¥* ¶Úú¾»1­§ÄÖf¨)(I‰™Ìêf 1zcJ.]ÿ_ÑZ¤I<÷Çô¬o9ì͉µ.4­E™5åÕ¼>½Öe"z²Q·6EÅŒá<1ÅÆRDáwË­Ýêè™âHnùš­ÖöJÅŠªHŒ®ÅÍq.õ)ÎdVL³ÆÌ!RÞd fj V¤£3jæw¹51³C˜uB&&S#6Ô¹4²œ2YaÅå½ :ʶ)i<´%lþpu¸TÇ1Í©ká°DQTO´(:š˜¼Üfd£ –B–jï3÷ ~Ej(‰ò舟p€û‘:‚pX Jv;@èŠ"pA° u>Äð=¸ ¡§&„¤( ƒ-D®’€¤ÈšM¬Hè(K4%íï~"äüÈ™Có€} 'îD¨Ÿ™TO΀ûÀ€æ =à)ñP}Ê~ ÊzA@Ââª'è?b(‰ÑDýâaPÿDN š‰¬ H©"„„(¢B0)"HBŒ€Ìór'¥Aù¢ˆŸ™¢²¨êT_bªyt@õ‚ÄOè‰Á§2A2ħ¬A&X’e‚™†j-`"pZi¨ˆ)B¾Ñð‚?„ìŠ"z„ó@ï)óP{•ðÔ*/¤=(¢'p zAüÅTö€ ~`)Õ2¢@(¯€€Àr(‰úP0(@óP J¢y>‘ˆ”Á?‚ƒ÷Á=Ê©õ>øÿ€ž*ñE;ÑDOä¡ú”C‹ÐQÂ(‰"ŸTO N€ªÊƒúTC¢&D”Cô—æ÷α?ØH‚«•Æ¢UÃ*¿ænÒH8LTÖ&Ë`Œ1¶.Àu›·T+b²Øa5™ÐÊKÄ4Ò½Ñ,´­4*Ô@¨¹!)A37¤_ï“ã;Ìû›`6`nÙ±U†[læ<ƒ;9KFÖº6©£¥VeFG®Õcdؤov7¨$²Ê2YB)/n…îë RÞË]Ý,E‰©ÒöQ€áÍXaqê—2µcUÃXÀD±Ñ³G-e‹)!G2™)úPƒŒ’VÚVܶ2ïîLÉ%“a3–C™Le¬µŸó]’¬&‚À®V¬¶ÚJ@µŒ¸2V~ÝŠ‚âHs"~„'/aúÿ7q¢¸ä¶í‘5 Úžî˜Àí«Kg;þ†\¾§¢Ÿj'LÓâp #’ „ň$ ÓgGJ »b5µ4±ì{ íqñ×k¬°ÀÒéµbhDŠDoszwKe/I“k &¯äušs:z^ØÂ<œ2§P.$Í›7t[ª³M7‹YYvÕ«X·S@©Ldç¯'ï^A@˜`ÇKjÙ<+-¶ìž²’p 'õv¬LB•Áˆ¤dkøA/'‰ X’˜,âS#oÐë .ÉSí¼å}½R1EácJŽ¥“«LŽ eS íË ëôÝ]vì -*‰½­ëÃgDÀ0„!€Šrj)ZõK§KÍ×™N&³%¥dÆæg 1!®m+IÜÍSœM­8„¨ ˜ÖµE Q!QeeˆN]Cb/'—ç0Ç1g¡iÒ©PÀ†²bˆcµ¦-»Fš.MF÷;³¹Ö Ö7vðvL›C´B²óoIö´È›êU«S33]³#ÎL“˜²ÜrKÜÅe™ŠÝ´î㌠Âi39dÐ.fª¥’u—¶ØKz0êwNc{‰¯FÞ³tr&@“|ÆKM&S¶Ä·œÌIc¶6G]ÖÍÌŸu¦3 C'<ˆˆ¬"Ȇ°àÁl \³VÏYÒÈC!mÖqÑt•椥[¯;b‘›Ò­X]-Æ™µ\7nòöõûeæFk‰“¼ËÂahšñÆšÌ5œÓ˜ãn“SŽ!t°wp‡”ã$ºÕ“LeBrÑ•à—cºµ0y­td0\æ’5}ê g$b (®gÝs¬Âi] 2ú8ÝŽ³­§8Ê=Íd¦g&œ6ÒÅãÔ“ŠX)ÓD‡Bª2"ã:Ùb£•ÕÅ(É[ìOœ·!ˆÓ!Û&-íÕpW¬·`U:ÝÝ,†\›£“\mÌxØ ÕBV!¶å+–&G0„‚¡þi•®™©Dœµ\Ê8vÆ-»‰:ç¥$íYÄMZ8öݤ­¾ç: «j뛳K•Š´ÄÀ†5õÚéÔXj5WŸEÔbzòtÅÅ¾š«¸½Û w^]îc ®Æ@„!*ïivnî°¹Dl±å4L5[wu¢ÛÚèdsAZ,‚¦JL¨Bm‚mejÌfAe0Ae©Q¿ ž$èžÇJG}ti9V& 6ÖûmŒ‘ËjŽÁ ‰Ç«dVQMrªå¯uR±€ÙºqܪpÁRκἉTgovÛ«±Íótð UÂIºm¸çwjBPCþŽÞs;½Û=–æBÉDfRbf*R-µ‚"ÄÌD %¬‘«vÚ¥Ò*•:@‰W¯eÕVê‘Ö‘›TfTb¨Ä̘Úá†#Îh !0fî—.ÔjNÞ8ìíî¡4’¤î)̆N{»¨R&Š’ à (fcfHÆ0³m±^• ‹@]Ò’nLÜH­IÁ ÉI¼ÐšÄVVÚW¡ünRÌé˜#%‘¶6Üzª£¼‘ëÿ-ß^‘õW ªrzºYü’”bogªk|t¤[ÖÅ»ª¢Ò©[`§ZÔ2=«µ›ÛC‹ÜëÜ 6ÒÆ±eZËh†Ö¨ hão.Í7º.¾ª_èÝë4ƒl¾^-ëã• ^[»mÙáѰ'1‹nÍ”Ú!âË0Í"ÚR uMÜž#+­¥…¡-ª7ý… 
networkx-1.8.1/examples/algorithms/0000775000175000017500000000000012177457361017266 5ustar aricaric00000000000000networkx-1.8.1/examples/algorithms/davis_club.py0000664000175000017500000000644112177456333021756 0ustar aricaric00000000000000#!/usr/bin/env python
"""
Davis Southern Club Women

Shows how to make unipartite projections of the graph and
compute the properties of those graphs.

These data were collected by Davis et al in the 1930s.
They represent observed attendance at 14 social events
by 18 Southern women. The graph is bipartite (clubs, women).

Data from:
http://vlado.fmf.uni-lj.si/pub/networks/data/Ucinet/UciData.htm
"""
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
__date__ = "$Date: 2005-05-12 14:33:11 -0600 (Thu, 12 May 2005) $"
__credits__ = """"""
__revision__ = "$Revision: 998 $"
# Copyright (C) 2004 by
# Aric Hagberg
# Dan Schult
# Pieter Swart
# All rights reserved.
# BSD license.
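# Data layout used in davis_club_graph() below: `women` lists the 18 women
# (one name per line), `clubs` lists the 14 events (one label per line),
# and `davisdat` is an 18 x 14 attendance matrix in which a 1 in row i,
# column j means that woman i attended event j; each 1 becomes one
# woman-event edge of the bipartite graph.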
import string import networkx as nx def davis_club_graph(create_using=None, **kwds): nwomen=14 nclubs=18 G=nx.generators.empty_graph(nwomen+nclubs,create_using=create_using,**kwds) G.clear() G.name="Davis Southern Club Women" women="""\ EVELYN LAURA THERESA BRENDA CHARLOTTE FRANCES ELEANOR PEARL RUTH VERNE MYRNA KATHERINE SYLVIA NORA HELEN DOROTHY OLIVIA FLORA""" clubs="""\ E1 E2 E3 E4 E5 E6 E7 E8 E9 E10 E11 E12 E13 E14""" davisdat="""\ 1 1 1 1 1 1 0 1 1 0 0 0 0 0 1 1 1 0 1 1 1 1 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 0 0 0 0 0 1 0 1 1 1 1 1 1 0 0 0 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 0 0 0 0 1 0 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 0 1 1 0 0 0 0 0 0 0 0 0 1 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 0 0 1 0 0 0 0 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 0 0 0 0 1 1 1 0 1 1 1 0 0 0 0 0 0 1 1 1 1 0 1 1 1 0 0 0 0 0 1 1 0 1 1 1 1 1 1 0 0 0 0 0 0 1 1 0 1 1 1 1 1 0 0 0 0 0 0 0 1 1 1 0 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0""" # women names w={} n=0 for name in women.split('\n'): w[n]=name n+=1 # club names c={} n=0 for name in clubs.split('\n'): c[n]=name n+=1 # parse matrix row=0 for line in davisdat.split('\n'): thisrow=list(map(int,line.split(' '))) for col in range(0,len(thisrow)): if thisrow[col]==1: G.add_edge(w[row],c[col]) row+=1 return (G,list(w.values()),list(c.values())) def project(B,pv,result=False,**kwds): """ Returns a graph that is the unipartite projection of the bipartite graph B onto the set of nodes given in list pv. The nodes retain their names and are connected if they share a common node in the vertex set of {B not pv}. No attempt is made to verify that the input graph B is bipartite. """ if result: G=result else: G=nx.Graph(**kwds) for v in pv: G.add_node(v) for cv in B.neighbors(v): G.add_edges_from([(v,u) for u in B.neighbors(cv)]) return G if __name__ == "__main__": # return graph and women and clubs lists (G,women,clubs)=davis_club_graph() # project bipartite graph onto women nodes W=project(G,women) # project bipartite graph onto club nodes C=project(G,clubs) print("Degree distributions of projected graphs") print('') print("Member #Friends") for v in W: print('%s %d' % (v,W.degree(v))) print('') print("Clubs #Members") for v in C: print('%s %d' % (v,C.degree(v))) networkx-1.8.1/examples/algorithms/rcm.py0000664000175000017500000000164212177456333020422 0ustar aricaric00000000000000# Cuthill-McKee ordering of matrices # The reverse Cuthill-McKee algorithm gives a sparse matrix ordering that # reduces the matrix bandwidth. 
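#
# The bandwidth printed below is computed directly from the nonzero pattern
# of the (reordered) Laplacian.  An illustrative helper, assuming a dense
# numpy array or matrix A:
#
#   import numpy as np
#   def bandwidth(A):
#       x, y = np.nonzero(A)
#       return (y - x).max() + (x - y).max() + 1
#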
# Requires NumPy # Copyright (C) 2011 by # Aric Hagberg # BSD License import networkx as nx from networkx.utils import reverse_cuthill_mckee_ordering import numpy as np # build low-bandwidth numpy matrix G=nx.grid_2d_graph(3,3) rcm = list(reverse_cuthill_mckee_ordering(G)) print "ordering",rcm print("unordered Laplacian matrix") A = nx.laplacian_matrix(G) x,y = np.nonzero(A) #print("lower bandwidth:",(y-x).max()) #print("upper bandwidth:",(x-y).max()) print("bandwidth: %d"%((y-x).max()+(x-y).max()+1)) print A B = nx.laplacian_matrix(G,nodelist=rcm) print("low-bandwidth Laplacian matrix") x,y = np.nonzero(B) #print("lower bandwidth:",(y-x).max()) #print("upper bandwidth:",(x-y).max()) print("bandwidth: %d"%((y-x).max()+(x-y).max()+1)) print B networkx-1.8.1/examples/algorithms/hartford_drug.edgelist0000664000175000017500000000443712177456333023650 0ustar aricaric00000000000000# source target 1 2 1 10 2 1 2 10 3 7 4 7 4 209 5 132 6 150 7 3 7 4 7 9 8 106 8 115 9 1 9 2 9 7 10 1 10 2 11 133 11 218 12 88 13 214 14 24 14 52 16 10 16 19 17 64 17 78 18 55 18 103 18 163 19 18 20 64 20 180 21 16 21 22 22 21 22 64 22 106 23 20 23 22 23 64 24 14 24 31 24 122 27 115 28 29 29 28 30 19 31 24 31 32 31 122 31 147 31 233 32 31 32 86 34 35 34 37 35 34 35 43 36 132 36 187 37 38 37 90 37 282 38 42 38 43 38 210 40 20 42 15 42 38 43 34 43 35 43 38 45 107 46 61 46 72 48 23 49 30 49 64 49 108 49 115 49 243 50 30 50 47 50 55 50 125 50 163 52 218 52 224 54 111 54 210 55 65 55 67 55 105 55 108 55 222 56 18 56 64 57 65 57 125 58 20 58 30 58 50 58 103 58 180 59 164 63 125 64 8 64 50 64 70 64 256 66 20 66 84 66 106 66 125 67 22 67 50 67 113 68 50 70 50 70 64 71 72 74 29 74 75 74 215 75 74 75 215 76 58 76 104 77 103 78 64 78 68 80 207 80 210 82 8 82 77 82 83 82 97 82 163 83 82 83 226 83 243 84 29 84 154 87 101 87 189 89 90 90 89 90 94 91 86 92 19 92 30 92 106 94 72 94 89 94 90 95 30 96 75 96 256 97 80 97 128 98 86 100 86 101 87 103 77 103 104 104 58 104 77 104 103 106 22 107 38 107 114 107 122 108 49 108 55 111 121 111 128 111 210 113 253 114 107 116 30 116 140 118 129 118 138 120 88 121 128 122 31 123 32 124 244 125 132 126 163 126 180 128 38 128 111 129 118 132 29 132 30 133 30 134 135 134 150 135 134 137 144 138 118 138 129 139 142 141 157 141 163 142 139 143 2 144 137 145 151 146 137 146 165 146 169 146 171 147 31 147 128 148 146 148 169 148 171 148 282 149 128 149 148 149 172 150 86 151 145 152 4 153 134 154 155 156 161 157 141 161 156 165 144 165 148 167 149 169 15 169 148 169 171 170 115 170 173 170 183 170 202 171 72 171 148 171 169 173 170 175 100 176 10 178 181 181 178 182 38 182 171 183 96 185 50 186 127 187 50 187 65 188 30 188 50 189 87 189 89 190 35 190 38 190 122 190 182 191 54 191 118 191 129 191 172 192 149 192 167 195 75 197 50 197 188 198 218 198 221 198 222 200 65 200 220 201 113 202 156 203 232 204 194 207 38 207 122 207 124 208 30 208 50 210 38 210 207 211 37 213 35 213 38 214 13 214 14 214 171 214 213 215 75 217 39 218 68 218 222 221 198 222 198 222 218 223 39 225 3 226 22 229 65 230 68 231 43 232 95 232 203 233 99 234 68 234 230 237 244 238 145 242 3 242 113 244 237 249 96 250 156 252 65 254 65 258 113 268 4 270 183 272 6 275 96 280 183 280 206 282 37 285 75 290 285 293 290networkx-1.8.1/examples/algorithms/krackhardt_centrality.py0000664000175000017500000000144612177456333024217 0ustar aricaric00000000000000#!/usr/bin/env python """ Centrality measures of Krackhardt social network. 
""" __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __date__ = "$Date: 2005-05-12 14:33:11 -0600 (Thu, 12 May 2005) $" __credits__ = """""" __revision__ = "$Revision: 998 $" # Copyright (C) 2004 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx import * G=krackhardt_kite_graph() print("Betweenness") b=betweenness_centrality(G) for v in G.nodes(): print("%0.2d %5.3f"%(v,b[v])) print("Degree centrality") d=degree_centrality(G) for v in G.nodes(): print("%0.2d %5.3f"%(v,d[v])) print("Closeness centrality") c=closeness_centrality(G) for v in G.nodes(): print("%0.2d %5.3f"%(v,c[v])) networkx-1.8.1/examples/algorithms/blockmodel.py0000664000175000017500000000570112177456333021754 0ustar aricaric00000000000000#!/usr/bin/env python # encoding: utf-8 """ Example of creating a block model using the blockmodel function in NX. Data used is the Hartford, CT drug users network: @article{, title = {Social Networks of Drug Users in {High-Risk} Sites: Finding the Connections}, volume = {6}, shorttitle = {Social Networks of Drug Users in {High-Risk} Sites}, url = {http://dx.doi.org/10.1023/A:1015457400897}, doi = {10.1023/A:1015457400897}, number = {2}, journal = {{AIDS} and Behavior}, author = {Margaret R. Weeks and Scott Clair and Stephen P. Borgatti and Kim Radda and Jean J. Schensul}, month = jun, year = {2002}, pages = {193--206} } """ __author__ = """\n""".join(['Drew Conway ', 'Aric Hagberg ']) from collections import defaultdict import networkx as nx import numpy from scipy.cluster import hierarchy from scipy.spatial import distance import matplotlib.pyplot as plt def create_hc(G): """Creates hierarchical cluster of graph G from distance matrix""" path_length=nx.all_pairs_shortest_path_length(G) distances=numpy.zeros((len(G),len(G))) for u,p in path_length.items(): for v,d in p.items(): distances[u][v]=d # Create hierarchical cluster Y=distance.squareform(distances) Z=hierarchy.complete(Y) # Creates HC using farthest point linkage # This partition selection is arbitrary, for illustrive purposes membership=list(hierarchy.fcluster(Z,t=1.15)) # Create collection of lists for blockmodel partition=defaultdict(list) for n,p in zip(list(range(len(G))),membership): partition[p].append(n) return list(partition.values()) if __name__ == '__main__': G=nx.read_edgelist("hartford_drug.edgelist") # Extract largest connected component into graph H H=nx.connected_component_subgraphs(G)[0] # Makes life easier to have consecutively labeled integer nodes H=nx.convert_node_labels_to_integers(H) # Create parititions with hierarchical clustering partitions=create_hc(H) # Build blockmodel graph BM=nx.blockmodel(H,partitions) # Draw original graph pos=nx.spring_layout(H,iterations=100) fig=plt.figure(1,figsize=(6,10)) ax=fig.add_subplot(211) nx.draw(H,pos,with_labels=False,node_size=10) plt.xlim(0,1) plt.ylim(0,1) # Draw block model with weighted edges and nodes sized by number of internal nodes node_size=[BM.node[x]['nnodes']*10 for x in BM.nodes()] edge_width=[(2*d['weight']) for (u,v,d) in BM.edges(data=True)] # Set positions to mean of positions of internal nodes from original graph posBM={} for n in BM: xy=numpy.array([pos[u] for u in BM.node[n]['graph']]) posBM[n]=xy.mean(axis=0) ax=fig.add_subplot(212) nx.draw(BM,posBM,node_size=node_size,width=edge_width,with_labels=False) plt.xlim(0,1) plt.ylim(0,1) plt.axis('off') plt.savefig('hartford_drug_block_model.png') networkx-1.8.1/examples/advanced/0000775000175000017500000000000012177457361016662 5ustar 
aricaric00000000000000networkx-1.8.1/examples/advanced/eigenvalues.py0000664000175000017500000000207612177456333021546 0ustar aricaric00000000000000#!/usr/bin/env python """ Create an G{n,m} random graph and compute the eigenvalues. Requires numpy or LinearAlgebra package from Numeric Python. Uses optional pylab plotting to produce histogram of eigenvalues. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __credits__ = """""" # Copyright (C) 2004-2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx import * try: import numpy.linalg eigenvalues=numpy.linalg.eigvals except ImportError: raise ImportError("numpy can not be imported.") try: from pylab import * except: pass n=1000 # 1000 nodes m=5000 # 5000 edges G=gnm_random_graph(n,m) L=generalized_laplacian(G) e=eigenvalues(L) print("Largest eigenvalue:", max(e)) print("Smallest eigenvalue:", min(e)) # plot with matplotlib if we have it # shows "semicircle" distribution of eigenvalues try: hist(e,bins=100) # histogram with 100 bins xlim(0,2) # eigenvalues between 0 and 2 show() except: pass networkx-1.8.1/examples/advanced/heavy_metal_umlaut.py0000664000175000017500000000370412177456333023123 0ustar aricaric00000000000000#!/usr/bin/python # -*- coding: utf-8 -*- """ Example using unicode strings as graph labels. Also shows creative use of the Heavy Metal Umlaut: http://en.wikipedia.org/wiki/Heavy_metal_umlaut """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __date__ = "" __credits__ = """""" __revision__ = "" # Copyright (C) 2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as NX try: import pylab as P except ImportError: pass try: hd='H' + unichr(252) + 'sker D' + unichr(252) mh='Mot' + unichr(246) + 'rhead' mc='M' + unichr(246) + 'tley Cr' + unichr(252) + 'e' st='Sp' + unichr(305) + 'n' + unichr(776) + 'al Tap' q='Queensr' + unichr(255) + 'che' boc='Blue ' + unichr(214) +'yster Cult' dt='Deatht' + unichr(246) + 'ngue' except NameError: hd='H' + chr(252) + 'sker D' + chr(252) mh='Mot' + chr(246) + 'rhead' mc='M' + chr(246) + 'tley Cr' + chr(252) + 'e' st='Sp' + chr(305) + 'n' + chr(776) + 'al Tap' q='Queensr' + chr(255) + 'che' boc='Blue ' + chr(214) +'yster Cult' dt='Deatht' + chr(246) + 'ngue' G=NX.Graph() G.add_edge(hd,mh) G.add_edge(mc,st) G.add_edge(boc,mc) G.add_edge(boc,dt) G.add_edge(st,dt) G.add_edge(q,st) G.add_edge(dt,mh) G.add_edge(st,mh) # write in UTF-8 encoding fh=open('edgelist.utf-8','wb') fh.write('# -*- coding: utf-8 -*-\n'.encode('utf-8')) # encoding hint for emacs NX.write_multiline_adjlist(G,fh,delimiter='\t', encoding = 'utf-8') # read and store in UTF-8 fh=open('edgelist.utf-8','rb') H=NX.read_multiline_adjlist(fh,delimiter='\t', encoding = 'utf-8') for n in G.nodes(): if n not in H: print(False) print(G.nodes()) try: pos=NX.spring_layout(G) NX.draw(G,pos,font_size=16,with_labels=False) for p in pos: # raise text positions pos[p][1]+=0.07 NX.draw_networkx_labels(G,pos) P.show() except: pass networkx-1.8.1/examples/advanced/iterated_dynamical_systems.py0000664000175000017500000001357512177456333024656 0ustar aricaric00000000000000""" Digraphs from Integer-valued Iterated Functions =============================================== Sums of cubes on 3N ------------------- The number 153 has a curious property. Let 3N={3,6,9,12,...} be the set of positive multiples of 3. 
Define an iterative process f:3N->3N as follows: for a given n, take each
digit of n (in base 10), cube it and then sum the cubes to obtain f(n).

When this process is repeated, the resulting series n, f(n), f(f(n)),...
terminates in 153 after a finite number of iterations (the process ends
because 153 = 1**3 + 5**3 + 3**3). In the language of discrete dynamical
systems, 153 is the global attractor for the iterated map f restricted
to the set 3N.

For example: take the number 108

f(108) = 1**3 + 0**3 + 8**3 = 513

and

f(513) = 5**3 + 1**3 + 3**3 = 153

So, starting at 108 we reach 153 in two iterations, represented as:

108->513->153

Computing all orbits of 3N up to 10**5 reveals that the attractor 153 is
reached in a maximum of 14 iterations. In this code we show that 13
iterations is the maximum required for all integers (in 3N) less than
10,000.

The smallest number that requires 13 iterations to reach 153 is 177, i.e.,

177->687->1071->345->216->225->141->66->432->99->1458->702->351->153

The resulting large digraphs are useful for testing network software.

The general problem
-------------------

Given numbers n, a power p and base b, define F(n; p, b) as the sum of the
digits of n (in base b) raised to the power p. The above example
corresponds to f(n)=F(n; 3,10), and below F(n; p, b) is implemented as the
function powersum(n,p,b). The iterative dynamical system defined by the
mapping n:->f(n) above (over 3N) converges to a single fixed point: 153.
Applying the map to all positive integers N leads to a discrete dynamical
process with 5 fixed points: 1, 153, 370, 371, 407. Modulo 3 those numbers
are 1, 0, 1, 2, 2.

The function f above has the added property that it maps a multiple of 3
to another multiple of 3; i.e. it is invariant on the subset 3N.

The squaring of digits (in base 10) results in cycles and the single fixed
point 1. I.e., from a certain point on, the process starts repeating
itself.

keywords: "Recurring Digital Invariant", "Narcissistic Number",
"Happy Number"

The 3n+1 problem
----------------

There is a rich history of mathematical recreations associated with
discrete dynamical systems.  The most famous is the Collatz 3n+1 problem.
See the function collatz_problem_digraph below. The Collatz conjecture
--- that every orbit returns to the fixed point 1 in finite time --- is
still unproven.  Even the great Paul Erdos said "Mathematics is not yet
ready for such problems", and offered $500 for its solution.

keywords: "3n+1", "3x+1", "Collatz problem", "Thwaite's conjecture"
"""

from networkx import *
from math import *

nmax=10000
p=3
mach_eps=0.00000000001

def digitsrep(n,b=10):
    """Return list of digits comprising n represented in base b.
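    For example (illustrative): digitsrep(153) returns [1, 5, 3] and
    digitsrep(10, b=2) returns [1, 0, 1, 0].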
    n must be a nonnegative integer"""
    # very inefficient if you only work with base 10
    dlist=[]
    if n<=0:
        return [0]
    maxpow=int(floor( log(n)/log(b) + mach_eps ))
    pow=maxpow
    while pow>=0:
        x=int(floor(n // b**pow))
        dlist.append(x)
        n=n-x*b**pow
        pow=pow-1
    return dlist

def powersum(n,p,b=10):
    """Return sum of digits of n (in base b) raised to the power p."""
    dlist=digitsrep(n,b)
    sum=0
    for k in dlist:
        sum+=k**p
    return sum

def attractor153_graph(n,p,multiple=3,b=10):
    """Return digraph of iterations of powersum(n,3,10)."""
    G=DiGraph()
    for k in range(1,n+1):
        if k%multiple==0 and k not in G:
            k1=k
            knext=powersum(k1,p,b)
            while k1!=knext:
                G.add_edge(k1,knext)
                k1=knext
                knext=powersum(k1,p,b)
    return G

def squaring_cycle_graph_old(n,b=10):
    """Return digraph of iterations of powersum(n,2,10)."""
    G=DiGraph()
    for k in range(1,n+1):
        k1=k
        G.add_node(k1) # case k1==knext, at least add node
        knext=powersum(k1,2,b)
        G.add_edge(k1,knext)
        while k1!=knext: # stop if fixed point
            k1=knext
            knext=powersum(k1,2,b)
            G.add_edge(k1,knext)
            if G.out_degree(knext) >=1:
                # knext has already been iterated in and out
                break
    return G

def sum_of_digits_graph(nmax,b=10):
    def f(n): return powersum(n,1,b)
    return discrete_dynamics_digraph(nmax,f)

def squaring_cycle_digraph(nmax,b=10):
    def f(n): return powersum(n,2,b)
    return discrete_dynamics_digraph(nmax,f)

def cubing_153_digraph(nmax):
    def f(n): return powersum(n,3,10)
    return discrete_dynamics_digraph(nmax,f)

def discrete_dynamics_digraph(nmax,f,itermax=50000):
    """Return the digraph of orbits of f started from 1,...,nmax.

    Each orbit is followed until it reaches a fixed point of f, rejoins an
    orbit already stored in the graph, or exceeds itermax steps.  Fixed
    points are left without outgoing edges so fixed_points() can find them.
    """
    G=DiGraph()
    for k in range(1,nmax+1):
        kold=k
        G.add_node(kold)
        knew=f(kold)
        niter=0
        while kold!=knew and niter<itermax:
            G.add_edge(kold,knew)
            if G.out_degree(knew)>=1:
                # knew has already been iterated in and out
                break
            kold=knew
            knew=f(kold)
            niter+=1
    return G

def collatz_problem_digraph(nmax):
    def f(n):
        if n%2==0:
            return n // 2
        else:
            return 3*n+1
    return discrete_dynamics_digraph(nmax,f)

def fixed_points(G):
    """Return a list of fixed points for the discrete dynamical system
    represented by the digraph G.
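    A fixed point is stored by discrete_dynamics_digraph as a node with no
    outgoing edges, which is what the out_degree test below checks.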
""" return [n for n in G if G.out_degree(n)==0] if __name__ == "__main__": nmax=10000 print("Building cubing_153_digraph(%d)"% nmax) G=cubing_153_digraph(nmax) print("Resulting digraph has", len(G), "nodes and", G.size()," edges") print("Shortest path from 177 to 153 is:") print(shortest_path(G,177,153)) print("fixed points are %s" % fixed_points(G)) networkx-1.8.1/examples/3d_drawing/0000775000175000017500000000000012177457361017136 5ustar aricaric00000000000000networkx-1.8.1/examples/3d_drawing/mayavi2_spring.py0000664000175000017500000000210712177456333022440 0ustar aricaric00000000000000# needs mayavi2 # run with ipython -wthread import networkx as nx import numpy as np from enthought.mayavi import mlab # some graphs to try #H=nx.krackhardt_kite_graph() #H=nx.Graph();H.add_edge('a','b');H.add_edge('a','c');H.add_edge('a','d') #H=nx.grid_2d_graph(4,5) H=nx.cycle_graph(20) # reorder nodes from 0,len(G)-1 G=nx.convert_node_labels_to_integers(H) # 3d spring layout pos=nx.spring_layout(G,dim=3) # numpy array of x,y,z positions in sorted node order xyz=np.array([pos[v] for v in sorted(G)]) # scalar colors scalars=np.array(G.nodes())+5 mlab.figure(1, bgcolor=(0, 0, 0)) mlab.clf() pts = mlab.points3d(xyz[:,0], xyz[:,1], xyz[:,2], scalars, scale_factor=0.1, scale_mode='none', colormap='Blues', resolution=20) pts.mlab_source.dataset.lines = np.array(G.edges()) tube = mlab.pipeline.tube(pts, tube_radius=0.01) mlab.pipeline.surface(tube, color=(0.8, 0.8, 0.8)) mlab.savefig('mayavi2_spring.png') # mlab.show() # interactive window networkx-1.8.1/examples/subclass/0000775000175000017500000000000012177457361016734 5ustar aricaric00000000000000networkx-1.8.1/examples/subclass/printgraph.py0000664000175000017500000001014612177456333021464 0ustar aricaric00000000000000""" Example subclass of the Graph class. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2009 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. # __docformat__ = "restructuredtext en" from networkx import Graph from networkx.exception import NetworkXException, NetworkXError import networkx.convert as convert from copy import deepcopy class PrintGraph(Graph): """ Example subclass of the Graph class. Prints activity log to file or standard output. 
""" def __init__(self, data=None, name='', file=None, **attr): Graph.__init__(self, data=data,name=name,**attr) if file is None: import sys self.fh=sys.stdout else: self.fh=open(file,'w') def add_node(self, n, attr_dict=None, **attr): Graph.add_node(self,n,attr_dict=attr_dict,**attr) self.fh.write("Add node: %s\n"%n) def add_nodes_from(self, nodes, **attr): for n in nodes: self.add_node(n, **attr) def remove_node(self,n): Graph.remove_node(self,n) self.fh.write("Remove node: %s\n"%n) def remove_nodes_from(self, nodes): adj = self.adj for n in nodes: self.remove_node(n) def add_edge(self, u, v, attr_dict=None, **attr): Graph.add_edge(self,u,v,attr_dict=attr_dict,**attr) self.fh.write("Add edge: %s-%s\n"%(u,v)) def add_edges_from(self, ebunch, attr_dict=None, **attr): for e in ebunch: u,v=e[0:2] self.add_edge(u,v,attr_dict=attr_dict,**attr) def remove_edge(self, u, v): Graph.remove_edge(self,u,v) self.fh.write("Remove edge: %s-%s\n"%(u,v)) def remove_edges_from(self, ebunch): for e in ebunch: u,v=e[0:2] self.remove_edge(u,v) def clear(self): self.name = '' self.adj.clear() self.node.clear() self.graph.clear() self.fh.write("Clear graph\n") def subgraph(self, nbunch, copy=True): # subgraph is needed here since it can destroy edges in the # graph (copy=False) and we want to keep track of all changes. # # Also for copy=True Graph() uses dictionary assignment for speed # Here we use H.add_edge() bunch =set(self.nbunch_iter(nbunch)) if not copy: # remove all nodes (and attached edges) not in nbunch self.remove_nodes_from([n for n in self if n not in bunch]) self.name = "Subgraph of (%s)"%(self.name) return self else: # create new graph and copy subgraph into it H = self.__class__() H.name = "Subgraph of (%s)"%(self.name) # add nodes H.add_nodes_from(bunch) # add edges seen=set() for u,nbrs in self.adjacency_iter(): if u in bunch: for v,datadict in nbrs.items(): if v in bunch and v not in seen: dd=deepcopy(datadict) H.add_edge(u,v,dd) seen.add(u) # copy node and graph attr dicts H.node=dict( (n,deepcopy(d)) for (n,d) in self.node.items() if n in H) H.graph=deepcopy(self.graph) return H if __name__=='__main__': G=PrintGraph() G.add_node('foo') G.add_nodes_from('bar',weight=8) G.remove_node('b') G.remove_nodes_from('ar') print(G.nodes(data=True)) G.add_edge(0,1,weight=10) print(G.edges(data=True)) G.remove_edge(0,1) G.add_edges_from(list(zip(list(range(0o3)),list(range(1,4)))),weight=10) print(G.edges(data=True)) G.remove_edges_from(list(zip(list(range(0o3)),list(range(1,4))))) print(G.edges(data=True)) G=PrintGraph() G.add_path(list(range(10))) print("subgraph") H1=G.subgraph(list(range(4)),copy=False) H2=G.subgraph(list(range(4)),copy=False) print(H1.edges()) print(H2.edges()) networkx-1.8.1/examples/drawing/0000775000175000017500000000000012177457361016550 5ustar aricaric00000000000000networkx-1.8.1/examples/drawing/lanl_routes.edgelist0000664000175000017500000004510012177456333022617 0ustar aricaric000000000000001 0 9 2 3 173 3 4 167 4 5 165 5 102 96 5 6 96.43 6 7 96.62 7 8 86 8 9 87 9 10 82 10 11 79 11 12 74 12 13 41 13 1 3 14 15 187.46 15 16 187 16 17 187.04 17 18 186.62 18 19 185.88 19 20 185.4 20 21 184.02 21 22 184.38 22 23 183.97 23 24 93.01 24 25 88.59 25 26 86.26 26 27 88.04 27 28 77.12 28 29 74 29 119 70.1 29 30 72 30 31 73 31 32 89.41 32 13 6.63 33 34 212.25 34 35 211.46 35 36 209.04 36 37 211.57 37 38 206.57 38 39 105.04 39 40 77.21 40 41 86.5 41 42 0 42 1 16 43 44 112.67 44 45 111.94 45 46 111.77 46 47 111.83 47 48 111.27 48 49 111.76 49 50 109.32 50 51 110.67 51 52 80.63 52 53 80.78 
53 41 30 54 55 164.62 55 56 164.13 56 57 164.24 57 58 156.43 58 59 88.92 59 60 88.6 60 61 86.53 61 62 85.96 62 10 86.8 63 64 221.31 64 65 220.88 65 66 220.84 66 67 220.27 67 68 215.19 68 69 209 69 70 213.81 70 71 208.04 71 72 196.45 72 73 197.05 73 74 163.85 74 75 186.4 75 76 180.35 76 77 103.92 77 78 98.6 78 79 98.83 79 80 96.09 80 10 309 81 82 174.3 82 83 173.57 83 84 173.9 84 85 173.66 85 86 173.75 86 87 92.62 87 88 73.93 88 32 91.44 89 90 158 90 91 155 91 92 158 91 97 157.76 92 93 86 93 94 325.32 94 32 5.53000000000003 95 96 158.17 96 91 157.6 97 98 84.74 98 94 84.87 99 100 282 100 101 279 101 5 167 102 7 87 103 104 201.16 104 105 200.5 105 106 200.5 106 39 121.02 107 108 148.27 108 109 147.59 109 110 147.87 110 111 147.88 111 112 147.82 112 113 140.19 113 114 147.54 114 115 64.02 115 116 67.75 116 117 70.55 117 118 98.3 118 119 80.4 119 31 81.28 120 121 161.26 121 122 160.84 122 123 160.86 123 58 160.84 124 125 238.34 125 126 237.79 126 127 237.08 127 128 234.34 128 129 236.72 129 130 237.18 130 131 119.25 131 132 111.15 132 133 30.93 133 134 50 134 135 49 135 42 43 136 137 148.68 137 138 147.78 138 139 147.74 139 140 148.29 140 141 136.56 141 142 145.37 142 143 144.93 143 144 141.78 143 160 144.92 144 145 141.65 145 146 72.32 146 147 68.81 147 148 73.15 148 149 93.13 149 150 82.38 150 151 86.03 151 152 73.3 152 153 80.2 153 154 83.08 154 32 0 155 156 152.24 156 157 151.47 157 158 150.17 158 159 151.59 159 140 145.38 160 145 143.86 161 162 197.3 162 163 196.83 163 164 196.73 164 165 196.8 165 166 131.22 165 383 128.8 166 167 36 167 168 44.08 168 42 11.49 169 170 178 170 171 174 171 172 166 172 5 166 173 174 213.7 174 175 213.01 175 176 213.03 176 177 213.38 177 178 202.81 178 179 204.73 179 180 203 180 38 202.46 181 182 44 182 183 41 183 184 43 184 185 44 185 186 42 186 187 33 187 188 32 188 189 39.44 189 42 36.28 190 191 258.85 191 192 257.61 192 193 258.1 193 194 87.26 194 195 88.26 195 196 85.82 196 197 18.93 197 198 32 198 188 33.68 199 200 114 200 201 110 201 202 114 202 203 112 203 204 29 204 205 36 205 206 37 206 42 27 207 208 562.86 208 209 561.89 209 210 561.94 210 211 9.50999999999999 211 212 14.12 212 213 6.81000000000006 213 214 1.72000000000003 214 215 11.83 215 117 85.94 216 217 114.12 217 218 112.57 218 219 112.04 219 220 112.01 220 188 88.74 221 222 211 222 223 208 223 224 211 224 225 103 225 226 104.75 226 227 103 226 687 105 227 98 73 228 229 67.83 229 230 66.68 230 231 67.39 231 232 67.36 232 233 66.39 233 234 32.11 234 188 32.19 235 236 194 236 237 191 237 172 166 238 239 191.94 239 240 190.85 240 241 191.48 241 242 190.73 242 243 183.14 243 244 182.32 244 245 179.92 245 246 182.17 246 97 173.79 247 248 195.23 248 249 194.78 249 250 179.06 250 251 179.01 251 252 168.31 252 57 165.26 253 254 92.14 254 255 91.07 255 256 90.41 256 257 90.85 257 258 46.52 258 259 57.31 259 260 4.04000000000001 260 261 19.22 261 262 8.82000000000001 262 263 79 263 41 78 264 265 102.69 265 266 102.24 266 267 101.92 267 1355 116 267 268 101.64 268 269 0 269 270 80.77 270 271 89.27 271 272 115 272 273 116 273 274 115 274 133 50 275 276 58.9 276 277 58.19 277 278 0 278 279 58.13 279 280 58.21 280 281 58.04 281 282 58.12 282 283 31.7 283 284 26.07 284 285 38.42 284 574 32.12 285 286 196 286 42 127 287 288 186.74 288 289 185.88 289 290 183.24 290 291 184.06 291 292 181.22 292 293 0 293 294 171.29 294 295 102.04 295 296 68.99 296 30 68.93 297 298 103.31 298 299 102.8 299 300 102.75 300 301 102.77 301 302 102.55 302 270 101.64 303 304 555.45 305 306 76.2 306 307 75.65 307 308 75.66 308 309 75.9 
309 310 75.7 310 311 69.12 311 312 68.99 312 32 0 313 314 170 314 315 167 315 316 169 316 317 169 317 318 169 318 319 167 319 320 167 320 321 27 321 198 16 322 323 178 323 324 175 324 325 177 325 326 125 326 327 117 327 328 34 328 329 34 329 330 34 330 331 0 331 198 33 332 333 85.6 333 334 84.88 334 335 84.69 335 336 77.26 336 337 80.75 337 53 79.65 338 339 104.12 339 340 103.35 340 341 103.55 341 342 61.24 342 343 90.09 343 344 92.58 344 345 92.92 345 346 86 346 9 86 347 348 106.96 348 349 106.37 349 350 75.33 350 351 90.59 351 61 66 352 353 76.11 353 354 75.34 354 355 75.16 355 356 74.84 356 357 61.66 357 358 103.06 358 359 34 359 168 34 360 361 122.16 361 362 121.69 362 166 121.31 363 364 63.84 364 365 63.35 365 366 63.5 366 367 48.79 367 368 47.81 368 369 38.89 369 370 38.77 370 371 37.31 371 358 36.43 372 373 210 373 374 206 374 375 187 375 376 343 376 377 193 377 378 33 378 379 86 378 866 33.57 378 958 33 379 285 196 380 381 51.17 381 382 50.66 382 383 27.75 383 359 32 384 385 44.03 385 386 43.56 386 387 43.19 387 358 40.03 388 389 79.01 389 390 0 390 358 78.04 391 392 186.71 392 393 185.92 393 394 184.8 394 395 171.14 395 396 176.31 396 53 82 397 398 133.54 398 399 133.05 399 400 133.09 400 383 132.6 401 402 101.49 402 403 101 403 358 100.55 404 405 199.34 405 406 198.85 406 407 198.94 407 408 199.01 408 409 198.79 409 410 53.72 410 411 33.45 411 412 38.86 412 413 36.46 413 414 38.68 414 205 31.92 415 416 35.64 416 417 34.93 417 418 34.91 418 414 34.93 419 420 119.9 420 421 119.37 421 422 119.02 422 423 118.89 423 215 111.28 424 425 57.09 425 426 56.3 426 427 54.33 427 428 55.68 428 429 55.89 429 430 52.57 430 283 39.54 431 432 91.65 432 433 90.74 433 434 91.05 434 435 90.27 435 436 87.89 436 437 83.17 437 438 76.52 438 439 67.26 440 441 32.05 441 42 32.57 442 443 103.73 443 444 100.75 444 357 103.22 445 446 226.87 446 447 226.41 447 448 226.52 448 166 225.66 449 450 107.54 450 451 107.08 451 383 106.74 452 453 190.21 453 454 186.79 454 455 188.77 455 456 188.36 456 457 41.12 457 458 39.38 458 459 39.48 459 460 37.96 460 286 32 461 462 32.81 462 359 32.31 463 464 106.75 464 465 106.27 465 358 105.93 466 467 34.12 467 468 15.78 468 469 33.79 469 470 33.05 470 471 53.35 471 188 17.55 472 473 90.41 473 474 89.07 474 475 90.06 475 476 89.2 476 477 88.42 477 478 85.87 478 10 0 479 480 142.27 480 481 8.90000000000001 481 482 13.62 482 483 112.18 483 29 138.4 484 485 101.64 485 486 100.96 486 487 101.12 487 488 100.77 488 489 49.98 489 117 48.89 490 491 42.63 491 492 42.02 492 493 41.89 493 494 41.72 494 460 40.09 495 496 184.89 496 497 184.2 497 498 184.58 498 499 184.51 499 500 43.65 500 501 43.27 501 502 39.97 502 286 39.64 503 504 87.59 504 505 86.98 505 506 68.34 506 507 86.09 507 79 85.85 508 509 35.46 509 510 34.98 510 511 35.09 511 512 35.06 512 203 32.71 513 514 237.49 514 515 236.63 515 516 235.99 516 517 235.45 517 119 232.59 518 519 108.9 519 520 108.33 520 521 108.34 521 522 108.32 522 523 89.23 523 524 76.97 524 525 76.55 525 32 74.26 526 527 94.58 527 528 93.93 528 529 93.74 529 530 92.33 530 531 91.65 531 532 92.78 532 533 52.01 533 119 76 534 535 278.82 535 536 277.82 536 537 277.42 537 538 277.54 538 215 269.96 539 540 61.14 540 541 59.85 541 542 60.8 542 543 60.65 543 544 56.01 544 470 33.14 545 546 111.16 546 547 109.47 547 548 110.05 548 549 108.98 549 550 108.86 550 551 47.69 551 552 47.26 552 205 55.57 553 554 85.92 554 555 83.29 555 556 85.58 556 557 85.45 557 558 79.84 558 559 76.12 559 478 67.6 560 561 315.3 561 562 314.68 562 563 314.69 563 564 314.81 564 565 
291.4 565 596 287.51 566 567 281.41 567 568 117.21 567 597 134.64 568 569 121.28 569 570 138.98 570 571 130.29 571 572 85.41 572 573 85.15 573 283 35.07 574 286 31.89 575 576 107.08 576 577 106.44 577 578 96 578 1347 96 578 579 106.29 579 580 106.33 580 204 21 581 582 182.8 582 583 180.62 583 584 181.1 584 585 181 585 586 176.62 586 587 167.92 587 588 33.47 588 589 34.93 589 580 33.76 590 591 293.64 591 592 293.08 592 593 293.16 594 595 292.45 595 564 291.9 596 566 287.81 597 569 129.61 598 599 108.64 599 600 103.72 600 203 108.12 601 602 188.26 602 603 186.83 603 101 150.99 604 605 172.09 605 606 171.68 606 607 171.75 607 57 165.06 608 609 108.26 609 610 107.62 610 611 107.17 611 612 107.3 612 613 94.52 614 615 179.19 615 616 178.63 616 617 178.73 617 618 177.65 618 619 93.43 619 312 87.42 620 621 180.61 621 622 179.95 622 623 180.24 623 624 180.2 624 625 180.07 625 626 182.28 626 627 176.33 627 628 99.93 628 629 96.99 629 630 96.68 630 396 76.07 631 632 156.82 632 633 156.12 633 634 155.87 634 635 156 635 636 155.69 636 637 155.46 637 638 169.15 638 639 168.91 639 640 168.78 640 28 47.56 641 642 199.77 642 643 196.16 643 644 199.4 644 645 197.12 645 646 174.59 646 71 197.29 647 648 234 648 649 230 649 650 233 650 651 102 651 652 98 652 197 101 653 654 187 654 655 184 655 101 167 656 657 203.5 657 658 202.75 658 659 202.65 659 660 202.31 660 661 202.32 661 662 202.19 662 663 181.05 663 664 93.39 664 665 86.23 665 148 93.14 666 667 204.1 667 668 203.08 668 669 203.14 669 617 177.3 670 671 115.11 671 672 114.18 672 673 114.39 673 674 110.33 674 675 107.11 676 677 262 677 678 261.21 678 679 256.73 679 680 258.46 680 681 258.25 681 637 257.7 682 683 234.24 683 684 233.35 684 685 232.7 685 686 29.28 686 225 103.48 687 93 321.3 688 689 89.06 689 690 88.36 691 692 88.72 692 693 88.5 693 285 88.12 694 695 637.37 695 696 636.56 696 697 634.76 697 698 628.88 698 699 615.38 699 700 102.91 700 701 85.77 701 702 78.1799999999999 702 703 96.09 703 165 52.89 704 705 98.12 705 706 97.48 706 707 95.44 707 708 97.47 708 709 97.42 709 167 43.83 710 711 237.43 711 712 236.85 712 713 236.53 713 714 236.33 714 715 208.15 715 716 207.98 716 38 108.65 716 717 107.34 717 39 111.29 718 719 87 719 720 82 720 721 86 721 722 87 722 723 86 723 724 79 724 725 32 725 396 83 726 727 196.18 727 728 195.25 728 729 194.91 729 730 195.83 730 731 110.77 731 732 111.26 732 733 111.4 733 734 111.31 734 735 111.15 735 736 90.31 736 737 93.77 737 215 96.41 738 739 456.99 739 740 454.87 740 741 455.13 741 742 454.46 742 743 422.76 743 744 327.98 744 687 326.23 745 746 241.98 746 747 241.31 747 748 241.24 748 749 236 749 750 231.22 750 751 91.12 751 752 97.31 752 753 103.3 753 754 31.03 754 198 0 755 756 219.66 756 757 218.95 757 758 219.13 758 759 219.11 759 760 218.51 760 761 218.5 761 762 200.71 762 763 199.49 763 203 105.48 764 765 186.62 765 766 185.87 766 5 127.52 767 768 88 768 102 85 769 770 199.66 770 771 199.14 771 772 199.05 772 773 196.95 773 774 194.12 774 775 194 775 776 190.3 776 777 190.08 777 625 187.75 778 779 203.98 779 780 203.14 780 781 202.48 781 782 182.54 782 783 177.2 783 626 175.98 784 785 230.98 785 786 230.13 786 787 229.17 787 788 228.7 788 789 222.24 789 790 208.82 790 791 96.82 791 628 96.76 792 793 107 793 794 105 794 795 106 795 796 107 796 797 106 797 798 106 798 799 91 799 345 91 800 801 343 801 375 340 802 803 191 803 804 188 804 805 190 805 806 191 806 807 149 807 808 149 808 809 146 809 810 11 810 811 13 811 812 33.77 812 42 16.29 813 814 201.7 814 815 200.68 815 816 199.52 816 817 199.61 817 
818 199.6 818 819 45.02 819 820 44.94 820 821 26.66 821 285 43.1 822 823 156.08 823 824 131.23 824 825 149.04 825 826 151.23 826 827 142.62 827 828 111.31 827 856 109.19 828 829 110.5 829 295 110.86 830 831 83.15 831 832 82.49 832 833 80.44 833 834 80.99 834 40 81.66 835 836 126.83 836 837 125.73 837 838 125.54 838 839 118.42 839 840 110.85 840 841 111.09 841 842 81.99 842 358 80.97 843 844 619.19 844 845 618.53 845 846 106.9 846 847 105.36 847 848 65.2 848 849 82.97 849 850 103.91 850 851 106.54 851 829 105.35 852 853 161.36 853 854 160.48 854 855 153.31 855 826 145.92 856 857 111.26 857 858 111.27 858 533 72 859 860 146.09 860 861 145.5 861 827 144.82 862 863 75.24 863 864 74.61 864 865 34.1 865 378 197 866 285 33.46 867 868 138 868 869 135 869 166 137 870 871 526.33 871 872 524.67 872 873 171.42 873 874 167.51 874 875 174.09 875 876 98.02 876 877 100.53 877 878 104.75 878 879 98.28 879 880 103.86 880 117 0 881 882 406.4 882 883 405.18 883 884 404.32 884 885 395.55 885 886 38.38 886 887 39.79 887 574 36.72 888 889 196.54 889 890 195.66 890 891 195.5 891 892 34.95 892 893 34.39 893 188 35.13 894 895 98 895 896 95 896 897 98 897 383 98 898 899 200.41 899 900 199.7 900 901 199.66 901 902 199.24 902 903 199.17 903 165 194.72 904 905 240.47 905 906 239.63 906 907 240.14 907 908 239.85 908 909 239.93 909 910 239.8 910 911 153.73 911 912 139.02 912 913 25.99 913 914 25.8 914 640 30.59 915 916 57.63 916 917 56.93 917 918 56.9 918 919 56.61 919 920 56.6 920 921 56.31 921 922 52.48 922 923 49.64 923 924 37 925 926 698.1 926 927 696.29 927 928 697.35 928 929 696.84 929 930 104.07 930 931 112.53 931 932 116.56 932 933 113.51 933 42 72.73 934 935 108 935 936 105 936 937 108 937 166 108 938 939 247.1 939 940 246.42 940 941 245.86 941 942 218.76 942 943 218.87 943 944 33.42 944 188 31 945 946 75 946 947 72 947 383 74 948 949 103 949 950 95 950 951 103 951 952 103 952 953 98 953 725 83 954 955 180 955 956 177 956 957 179 957 377 76 957 865 146 958 460 32 959 960 91 960 961 87 961 962 91 962 963 91 963 964 90 964 118 89 965 966 306 966 967 303 967 968 305 968 969 155 969 970 303 970 971 192 971 972 112 972 973 112 973 974 111 974 975 80 975 41 79 976 977 170.09 977 978 56.57 978 979 165.74 979 980 169.19 980 981 168.92 981 982 169.16 982 983 169.1 983 984 161.78 984 985 161.68 985 986 164.96 986 987 164.63 987 988 144.05 988 989 33.96 989 811 33.93 990 991 194.72 991 992 193.9 992 993 194.2 993 994 165.1 994 995 164.38 995 985 164.86 996 997 144.93 997 998 144.35 998 999 144.48 999 1000 144.54 1000 1001 142.77 1001 987 144.15 1002 1003 92 1003 1004 16 1004 1005 60 1005 62 91 1006 1007 103 1007 1008 100 1008 1009 103 1009 1010 96 1010 1011 60 1011 80 86 1012 1013 101 1013 1014 97 1014 1015 95 1015 1016 77 1016 60 54 1017 1018 98 1018 1019 95 1019 1020 0 1020 351 19 1021 1022 380 1022 1023 254 1023 1024 370 1024 1025 372 1025 1026 379 1026 80 311 1027 1028 47 1028 1029 40 1029 1030 40 1030 1031 46 1031 924 59.42 1032 1033 39 1033 1034 36 1034 1035 38 1035 552 39 1036 1037 56 1037 1038 52 1038 1039 55 1039 923 55 1040 1041 42 1041 1042 39 1042 1043 42 1043 358 41 1044 1045 104 1045 1046 101 1046 1047 103 1047 1048 102 1048 1049 102 1049 858 98 1050 1051 338 1051 1052 335 1052 1053 337 1053 1054 337 1054 1055 64 1055 1056 178 1056 1057 177 1057 1058 177 1058 1059 177 1059 1060 64 1060 1061 153 1061 1062 31 1062 460 153 1063 1064 183 1064 1065 180 1065 377 33 1066 1067 255.41 1067 1068 254.7 1068 1069 254.31 1069 1070 218.96 1070 1071 238.65 1071 1072 238.41 1072 1073 237.99 1073 1074 81.72 1074 1075 102.28 
1075 188 92.68 1076 1077 87.02 1077 1078 86.6 1078 1079 86.7 1079 1080 86.71 1080 1081 86.51 1081 1082 86.28 1082 117 72.11 1083 1084 37 1084 865 26 1085 1086 88.34 1086 1087 86.73 1087 1088 87.48 1088 1089 87.6 1089 1090 86.77 1090 571 85.58 1091 1092 82 1092 1093 78 1093 1094 80 1094 1095 79 1095 1096 78 1096 1097 77 1097 1098 79 1098 262 79 1099 1100 80.61 1100 1101 77.63 1101 40 71.88 1102 1103 90.9 1103 1104 90.1 1104 1105 90.42 1106 1107 329 1107 1108 326 1108 957 301 1109 1110 157.39 1110 1111 156.91 1111 1112 156.99 1112 1113 32.57 1113 188 3.44 1114 1115 33 1115 1116 29 1116 1117 33 1117 1118 32 1118 1119 32 1119 135 32 1120 1121 179 1121 1122 175 1122 1123 178 1123 1124 0 1124 1125 0 1126 1127 43.71 1127 1128 43.16 1128 1129 41.4 1129 1130 33.49 1130 812 31.02 1131 1132 87.82 1132 1133 86.88 1133 1134 85.49 1134 9 77.69 1135 1136 98 1136 1137 102 1137 1138 98 1138 1139 100 1139 1140 41 1140 1141 95 1141 1142 91 1142 41 34 1143 1144 352.6 1144 1145 351.82 1145 1146 351.73 1146 1147 343.47 1147 1148 323.5 1148 1149 172.05 1149 1150 171.57 1150 1151 179.87 1151 1152 154.08 1152 1153 157.73 1153 1154 170.23 1154 42 55.24 1155 1156 185.3 1156 1157 184.13 1157 1158 183.56 1158 1159 183.14 1159 1160 42.16 1160 1161 40.43 1162 1163 40.61 1163 1164 32.05 1164 1165 34.36 1165 205 31.2 1166 1167 39 1167 1168 36 1168 1169 0 1169 1170 38 1170 924 38 1171 1172 240.93 1172 1173 240.24 1173 1174 238.02 1174 1074 96.87 1175 1176 107 1176 1177 104 1177 1178 104 1178 1179 104 1179 1180 102 1180 1181 84 1181 1182 84 1182 28 74 1183 1184 108.07 1184 1185 107.53 1185 1186 106.58 1186 709 106.38 1187 1188 119.42 1188 1189 118.71 1189 1190 118.15 1190 1191 117.11 1191 1192 115.11 1192 1193 114.82 1193 1194 104.91 1194 41 103.99 1195 1196 37.68 1196 1197 37.15 1197 1198 37.19 1198 1199 33.94 1199 414 34.17 1200 1201 118.72 1201 1202 113.14 1202 1203 0 1203 1204 78.09 1204 1205 117.75 1205 1206 117.48 1206 1207 103.57 1207 1208 116.96 1208 1209 117.29 1209 1210 106.13 1210 10 113.75 1211 1212 95.92 1212 1213 95.3 1213 1214 93.98 1214 1215 93.98 1215 1216 93.93 1216 1217 89.21 1217 1218 85.06 1218 1219 65.28 1219 1220 65.3 1220 40 63.33 1221 1222 33 1222 1223 30 1223 1224 33 1224 187 33 1225 1226 216.31 1226 1227 215.75 1227 1228 207.33 1228 716 201.93 1229 1230 163.93 1230 1231 163.51 1231 1232 163.47 1232 1233 163.54 1233 1234 91.76 1234 59 88.9 1235 1236 155.59 1236 1237 154.73 1237 1238 155.15 1238 1239 155.18 1239 1240 155.17 1240 1241 155.12 1241 1242 154.86 1242 1243 75.58 1243 1244 75.58 1244 1245 75.6 1245 117 70.16 1246 1247 211.11 1247 1248 210.29 1248 1249 210.75 1249 1250 210.68 1250 1251 210.58 1251 1252 210.37 1252 1227 209.84 1253 1254 86.95 1254 1255 86.13 1255 1256 86.29 1256 1257 85.37 1257 1258 77.29 1258 1259 76.3 1259 215 75.57 1260 1261 117.66 1261 1262 109.11 1262 1263 101.86 1263 1264 92.95 1264 215 107.68 1265 1266 69.35 1266 1267 68.74 1267 1268 67.13 1268 1269 67.59 1269 1270 66.29 1270 1031 67.14 1271 1272 95.06 1272 1273 94.48 1273 1274 93.89 1274 1275 93.92 1275 1276 76.55 1276 1277 93.73 1277 149 87.97 1278 1279 67 1279 1280 90 1280 1281 91 1281 1282 91 1282 1283 90 1283 1284 89 1284 1285 90 1285 1286 89 1286 1287 85 1287 9 85 1288 1289 123 1289 1290 120 1290 1291 122 1291 1292 115 1292 1293 119 1293 1294 116 1294 1295 113 1295 1296 116 1296 1297 44 1297 133 29 1298 1299 109 1299 1300 106 1300 1301 103 1301 1302 103 1302 1303 103 1303 1304 94 1304 1305 95 1305 1306 90 1306 1307 84 1307 1308 84 1308 1142 83 1309 1310 215 1310 1311 212 1311 1312 214 1312 1313 212 1313 1314 213 
1313 1331 245.24 1314 188 37.52 1315 1316 227 1316 1317 224 1317 1318 226 1318 1319 226 1319 944 45 1320 1321 219.19 1321 1322 218.4 1322 1323 218.88 1323 1324 218.55 1324 1325 213.7 1325 1314 214.5 1326 1327 248.5 1327 1328 247.98 1328 1329 248.15 1329 1330 247.15 1330 1313 245.15 1331 188 64.76 1332 1333 140 1333 1334 137 1334 1335 139 1335 1336 139 1336 1337 139 1337 1338 138 1338 1339 70 1339 1340 119 1340 1341 121 1341 1342 121 1342 274 107 1343 1344 97 1344 1345 93 1345 1346 96 1346 577 96 1347 580 95 1348 1349 51 1349 1350 48 1350 1351 50 1351 203 47 1352 1353 117 1353 1354 114 1354 267 116 1355 1356 116 1356 1357 116 1357 271 116 networkx-1.8.1/examples/drawing/edge_colormap.py0000664000175000017500000000075712177456333021731 0ustar aricaric00000000000000#!/usr/bin/env python """ Draw a graph with matplotlib, color edges. You must have matplotlib>=87.7 for this to work. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" try: import matplotlib.pyplot as plt except: raise import networkx as nx G=nx.star_graph(20) pos=nx.spring_layout(G) colors=range(20) nx.draw(G,pos,node_color='#A0CBE2',edge_color=colors,width=4,edge_cmap=plt.cm.Blues,with_labels=False) plt.savefig("edge_colormap.png") # save as png plt.show() # display networkx-1.8.1/examples/drawing/chess_masters.py0000777000175000017500000000000012177456333027562 2../multigraph/chess_masters.pyustar aricaric00000000000000networkx-1.8.1/examples/drawing/node_colormap.py0000664000175000017500000000070412177456333021742 0ustar aricaric00000000000000#!/usr/bin/env python """ Draw a graph with matplotlib, color by degree. You must have matplotlib for this to work. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" try: import matplotlib.pyplot as plt except: raise import networkx as nx G=nx.cycle_graph(24) pos=nx.spring_layout(G,iterations=200) nx.draw(G,pos,node_color=range(24),node_size=800,cmap=plt.cm.Blues) plt.savefig("node_colormap.png") # save as png plt.show() # display networkx-1.8.1/examples/drawing/sampson.py0000664000175000017500000000254312177456333020604 0ustar aricaric00000000000000#!/usr/bin/env python """ Sampson's monastery data. Shows how to read data from a zip file and plot multiple frames. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
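#
# The script below reads the three edge lists straight out of
# sampson_data.zip without unpacking it.  A minimal sketch of the same
# pattern on Python 3, where cStringIO is unavailable (illustrative):
#
#   import io, zipfile
#   import networkx as nx
#   zf = zipfile.ZipFile('sampson_data.zip')
#   G1 = nx.read_edgelist(io.BytesIO(zf.read('samplike1.txt')), delimiter='\t')
#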
import zipfile, cStringIO
import networkx as nx
import matplotlib.pyplot as plt

zf = zipfile.ZipFile('sampson_data.zip') # zipfile object
e1=cStringIO.StringIO(zf.read('samplike1.txt')) # read info file
e2=cStringIO.StringIO(zf.read('samplike2.txt')) # read info file
e3=cStringIO.StringIO(zf.read('samplike3.txt')) # read info file
G1=nx.read_edgelist(e1,delimiter='\t')
G2=nx.read_edgelist(e2,delimiter='\t')
G3=nx.read_edgelist(e3,delimiter='\t')
pos=nx.spring_layout(G3,iterations=100)
plt.clf()

plt.subplot(221)
plt.title('samplike1')
nx.draw(G1,pos,node_size=50,with_labels=False)
plt.subplot(222)
plt.title('samplike2')
nx.draw(G2,pos,node_size=50,with_labels=False)
plt.subplot(223)
plt.title('samplike3')
nx.draw(G3,pos,node_size=50,with_labels=False)
plt.subplot(224)
plt.title('samplike1,2,3')
nx.draw(G3,pos,edgelist=G3.edges(),node_size=50,with_labels=False)
nx.draw_networkx_edges(G1,pos,alpha=0.25)
nx.draw_networkx_edges(G2,pos,alpha=0.25)
plt.savefig("sampson.png") # save as png
plt.show() # display
networkx-1.8.1/examples/drawing/unix_email.py0000775000175000017500000000517312177456333021263 0ustar aricaric00000000000000#!/usr/bin/env python
"""
Create a directed graph, allowing multiple edges and self loops, from a
unix mailbox.  The nodes are email addresses with links that point from
the sender to the receivers.  The edge data is a Python email.Message
object which contains all of the email message data.

This example shows the power of a MultiDiGraph to hold edge data of
arbitrary Python objects (in this case a list of email messages).

By default, load the sample unix email mailbox called "unix_email.mbox".
You can load your own mailbox by naming it on the command line, e.g.

python unix_email.py /var/spool/mail/username

"""
__author__ = """Aric Hagberg (hagberg@lanl.gov)"""
#    Copyright (C) 2005 by
#    Aric Hagberg
#    Dan Schult
#    Pieter Swart
#    All rights reserved.
#    BSD license.
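#
# Because a MultiDiGraph is used, two addresses that exchange several
# messages keep one edge per message.  A minimal sketch of inspecting them
# after the graph G is built below ('alice@example.com' and
# 'bob@example.com' are placeholder addresses):
#
#   for u, v, d in G.edges_iter(data=True):
#       if u == 'alice@example.com' and v == 'bob@example.com':
#           print(d['message']['Subject'])
#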
import email from email.utils import getaddresses,parseaddr import mailbox import sys # unix mailbox recipe # see http://www.python.org/doc/current/lib/module-mailbox.html def msgfactory(fp): try: return email.message_from_file(fp) except email.Errors.MessageParseError: # Don't return None since that will stop the mailbox iterator return '' if __name__ == '__main__': import networkx as nx try: import matplotlib.pyplot as plt except: pass if len(sys.argv)==1: filePath = "unix_email.mbox" else: filePath = sys.argv[1] mbox = mailbox.mbox(filePath, msgfactory) # parse unix mailbox G=nx.MultiDiGraph() # create empty graph # parse each messages and build graph for msg in mbox: # msg is python email.Message.Message object (source_name,source_addr) = parseaddr(msg['From']) # sender # get all recipients # see http://www.python.org/doc/current/lib/module-email.Utils.html tos = msg.get_all('to', []) ccs = msg.get_all('cc', []) resent_tos = msg.get_all('resent-to', []) resent_ccs = msg.get_all('resent-cc', []) all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs) # now add the edges for this mail message for (target_name,target_addr) in all_recipients: G.add_edge(source_addr,target_addr,message=msg) # print edges with message subject for (u,v,d) in G.edges_iter(data=True): print("From: %s To: %s Subject: %s"%(u,v,d['message']["Subject"])) try: # draw pos=nx.spring_layout(G,iterations=10) nx.draw(G,pos,node_size=0,alpha=0.4,edge_color='r',font_size=16) plt.savefig("unix_email.png") plt.show() except: # matplotlib not available pass networkx-1.8.1/examples/drawing/atlas.py0000664000175000017500000000511512177456333020226 0ustar aricaric00000000000000#!/usr/bin/env python """ Atlas of all graphs of 6 nodes or less. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx #from networkx import * #from networkx.generators.atlas import * from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic as isomorphic import random def atlas6(): """ Return the atlas of all connected graphs of 6 nodes or less. Attempt to check for isomorphisms and remove. 
""" Atlas=nx.graph_atlas_g()[0:208] # 208 # remove isolated nodes, only connected graphs are left U=nx.Graph() # graph for union of all graphs in atlas for G in Atlas: zerodegree=[n for n in G if G.degree(n)==0] for n in zerodegree: G.remove_node(n) U=nx.disjoint_union(U,G) # list of graphs of all connected components C=nx.connected_component_subgraphs(U) UU=nx.Graph() # do quick isomorphic-like check, not a true isomorphism checker nlist=[] # list of nonisomorphic graphs for G in C: # check against all nonisomorphic graphs so far if not iso(G,nlist): nlist.append(G) UU=nx.disjoint_union(UU,G) # union the nonisomorphic graphs return UU def iso(G1, glist): """Quick and dirty nonisomorphism checker used to check isomorphisms.""" for G2 in glist: if isomorphic(G1,G2): return True return False if __name__ == '__main__': import networkx as nx G=atlas6() print("graph has %d nodes with %d edges"\ %(nx.number_of_nodes(G),nx.number_of_edges(G))) print(nx.number_connected_components(G),"connected components") try: from networkx import graphviz_layout except ImportError: raise ImportError("This example needs Graphviz and either PyGraphviz or Pydot") import matplotlib.pyplot as plt plt.figure(1,figsize=(8,8)) # layout graphs with positions using graphviz neato pos=nx.graphviz_layout(G,prog="neato") # color nodes the same in each connected subgraph C=nx.connected_component_subgraphs(G) for g in C: c=[random.random()]*nx.number_of_nodes(g) # random color... nx.draw(g, pos, node_size=40, node_color=c, vmin=0.0, vmax=1.0, with_labels=False ) plt.savefig("atlas.png",dpi=75) networkx-1.8.1/examples/drawing/weighted_graph.py0000664000175000017500000000177612177456333022114 0ustar aricaric00000000000000#!/usr/bin/env python """ An example using Graph as a weighted network. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" try: import matplotlib.pyplot as plt except: raise import networkx as nx G=nx.Graph() G.add_edge('a','b',weight=0.6) G.add_edge('a','c',weight=0.2) G.add_edge('c','d',weight=0.1) G.add_edge('c','e',weight=0.7) G.add_edge('c','f',weight=0.9) G.add_edge('a','d',weight=0.3) elarge=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] >0.5] esmall=[(u,v) for (u,v,d) in G.edges(data=True) if d['weight'] <=0.5] pos=nx.spring_layout(G) # positions for all nodes # nodes nx.draw_networkx_nodes(G,pos,node_size=700) # edges nx.draw_networkx_edges(G,pos,edgelist=elarge, width=6) nx.draw_networkx_edges(G,pos,edgelist=esmall, width=6,alpha=0.5,edge_color='b',style='dashed') # labels nx.draw_networkx_labels(G,pos,font_size=20,font_family='sans-serif') plt.axis('off') plt.savefig("weighted_graph.png") # save as png plt.show() # display networkx-1.8.1/examples/drawing/degree_histogram.py0000664000175000017500000000174412177456333022436 0ustar aricaric00000000000000#!/usr/bin/env python """ Random graph from given degree sequence. Draw degree histogram with matplotlib. 
""" __author__ = """Aric Hagberg (hagberg@lanl.gov)""" try: import matplotlib.pyplot as plt import matplotlib except: raise import networkx as nx z=nx.utils.create_degree_sequence(100,nx.utils.powerlaw_sequence,exponent=2.1) nx.is_valid_degree_sequence(z) print "Configuration model" G=nx.configuration_model(z) # configuration model degree_sequence=sorted(nx.degree(G).values(),reverse=True) # degree sequence #print "Degree sequence", degree_sequence dmax=max(degree_sequence) plt.loglog(degree_sequence,'b-',marker='o') plt.title("Degree rank plot") plt.ylabel("degree") plt.xlabel("rank") # draw graph in inset plt.axes([0.45,0.45,0.45,0.45]) Gcc=nx.connected_component_subgraphs(G)[0] pos=nx.spring_layout(Gcc) plt.axis('off') nx.draw_networkx_nodes(Gcc,pos,node_size=20) nx.draw_networkx_edges(Gcc,pos,alpha=0.4) plt.savefig("degree_histogram.png") plt.show() networkx-1.8.1/examples/drawing/lanl_routes.py0000664000175000017500000000373112177456333021453 0ustar aricaric00000000000000#!/usr/bin/env python """ Routes to LANL from 186 sites on the Internet. This uses Graphviz for layout so you need PyGraphviz or Pydot. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2008 # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. def lanl_graph(): """ Return the lanl internet view graph from lanl.edges """ import networkx as nx try: fh=open('lanl_routes.edgelist','r') except IOError: print "lanl.edges not found" raise G=nx.Graph() time={} time[0]=0 # assign 0 to center node for line in fh.readlines(): (head,tail,rtt)=line.split() G.add_edge(int(head),int(tail)) time[int(head)]=float(rtt) # get largest component and assign ping times to G0time dictionary G0=nx.connected_component_subgraphs(G)[0] G0.rtt={} for n in G0: G0.rtt[n]=time[n] return G0 if __name__ == '__main__': import networkx as nx import math try: from networkx import graphviz_layout except ImportError: raise ImportError("This example needs Graphviz and either PyGraphviz or Pydot") G=lanl_graph() print "graph has %d nodes with %d edges"\ %(nx.number_of_nodes(G),nx.number_of_edges(G)) print nx.number_connected_components(G),"connected components" import matplotlib.pyplot as plt plt.figure(figsize=(8,8)) # use graphviz to find radial layout pos=nx.graphviz_layout(G,prog="twopi",root=0) # draw nodes, coloring by rtt ping time nx.draw(G,pos, node_color=[G.rtt[v] for v in G], with_labels=False, alpha=0.5, node_size=15) # adjust the plot limits xmax=1.02*max(xx for xx,yy in pos.values()) ymax=1.02*max(yy for xx,yy in pos.values()) plt.xlim(0,xmax) plt.ylim(0,ymax) plt.savefig("lanl_routes.png") networkx-1.8.1/examples/drawing/knuth_miles.txt.gz0000777000175000017500000000000012177456333027261 2../graph/knuth_miles.txt.gzustar aricaric00000000000000networkx-1.8.1/examples/drawing/sampson_data.zip0000664000175000017500000000172012177456333021743 0ustar aricaric00000000000000PKÖUe<Êëtë¤5 samplike1.txtUT ƒC‘KE‡’Kux èè%Ž» Ã0 Dkq YÀÔ_Y#°‹þÀv“ísG÷D‘<’/ÿëqí›_÷m\÷rþü<îñöÇr~÷Ù«d§]À¤@R~’‹R¡ì’¨Ë doUÂU8+\U”² -¶. 
Ò‰P"¬i!šué„6U2“±ŸªSÅ Àd2Ñ,Íy?ÙH M6:árÍ¢•Äba±UÕ,Í2íPKúUe<Ùµ½¥= samplike2.txtUT ÇC‘KTD‘Kux èèŽKƒ0 C×É)2Ó `çßkô™EØôö•µÐ#X²’WøŒý¼çöyŒûÙ®_XÇ3ÞáÜ®ï\ƒúêÄ‹S¡ ¨C¢›$á$$’Ù+” Uþø ›ßæe1ˆ!ê:Vºo¬nV·+€h(4eAרd$YIaD¬Q”KÊsd<ÚëÁâ$±Å^/™ë™UÅî•ÊIeºqÒè¶?PKBVe<|ò±£8 samplike3.txtUT KD‘KTD‘Kux èèŽKà C×p fz°!½FOÀL²è"ŸI²éí+y¡‡#,‘WúŒí¼=mÇ>îg½~iÏx§s½¾Ç’J”c*¤AÂAB‰ªPƒ4Tœ¾ÂWøŠoΆ;CIC¸áìq†d"„Èýfú3}À€dâYœÕÙœ~kA•b’}#3XXFfguªUüUQ¾"n^j^jn7Ÿ›gº;ÝwúPKÖUe<Êëtë¤5 ¤samplike1.txtUTƒC‘Kux èèPKúUe<Ùµ½¥= ¤ësamplike2.txtUTÇC‘Kux èèPKBVe<|ò±£8 ¤×samplike3.txtUTKD‘Kux èèPKùÁnetworkx-1.8.1/examples/drawing/giant_component.py0000664000175000017500000000400212177456333022300 0ustar aricaric00000000000000#!/usr/bin/env python """ This example illustrates the sudden appearance of a giant connected component in a binomial random graph. Requires pygraphviz and matplotlib to draw. """ # Copyright (C) 2006-2008 # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. try: import matplotlib.pyplot as plt except: raise import networkx as nx import math try: from networkx import graphviz_layout layout=nx.graphviz_layout except ImportError: print "PyGraphviz not found; drawing with spring layout; will be slow." layout=nx.spring_layout n=150 # 150 nodes # p value at which giant component (of size log(n) nodes) is expected p_giant=1.0/(n-1) # p value at which graph is expected to become completely connected p_conn=math.log(n)/float(n) # the following range of p values should be close to the threshold pvals=[0.003, 0.006, 0.008, 0.015] region=220 # for pylab 2x2 subplot layout plt.subplots_adjust(left=0,right=1,bottom=0,top=0.95,wspace=0.01,hspace=0.01) for p in pvals: G=nx.binomial_graph(n,p) pos=layout(G) region+=1 plt.subplot(region) plt.title("p = %6.3f"%(p)) nx.draw(G,pos, with_labels=False, node_size=10 ) # identify largest connected component Gcc=nx.connected_component_subgraphs(G) G0=Gcc[0] nx.draw_networkx_edges(G0,pos, with_labels=False, edge_color='r', width=6.0 ) # show other connected components for Gi in Gcc[1:]: if len(Gi)>1: nx.draw_networkx_edges(Gi,pos, with_labels=False, edge_color='r', alpha=0.3, width=5.0 ) plt.savefig("giant_component.png") plt.show() # display networkx-1.8.1/examples/drawing/house_with_colors.py0000664000175000017500000000117312177456333022661 0ustar aricaric00000000000000#!/usr/bin/env python """ Draw a graph with matplotlib. You must have matplotlib for this to work. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" try: import matplotlib.pyplot as plt except: raise import networkx as nx G=nx.house_graph() # explicitly set positions pos={0:(0,0), 1:(1,0), 2:(0,1), 3:(1,1), 4:(0.5,2.0)} nx.draw_networkx_nodes(G,pos,node_size=2000,nodelist=[4]) nx.draw_networkx_nodes(G,pos,node_size=3000,nodelist=[0,1,2,3],node_color='b') nx.draw_networkx_edges(G,pos,alpha=0.5,width=6) plt.axis('off') plt.savefig("house_with_colors.png") # save as png plt.show() # display networkx-1.8.1/examples/drawing/chess_masters_WCC.pgn.bz20000777000175000017500000000000012177456333032432 2../multigraph/chess_masters_WCC.pgn.bz2ustar aricaric00000000000000networkx-1.8.1/examples/drawing/unix_email.mbox0000777000175000017500000000000012177456333026117 2../graph/unix_email.mboxustar aricaric00000000000000networkx-1.8.1/examples/drawing/four_grids.py0000664000175000017500000000152212177456333021263 0ustar aricaric00000000000000#!/usr/bin/env python """ Draw a graph with matplotlib. You must have matplotlib for this to work. 
""" __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2008 # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. try: import matplotlib.pyplot as plt except: raise import networkx as nx G=nx.grid_2d_graph(4,4) #4x4 grid pos=nx.spring_layout(G,iterations=100) plt.subplot(221) nx.draw(G,pos,font_size=8) plt.subplot(222) nx.draw(G,pos,node_color='k',node_size=0,with_labels=False) plt.subplot(223) nx.draw(G,pos,node_color='g',node_size=250,with_labels=False,width=6) plt.subplot(224) H=G.to_directed() nx.draw(H,pos,node_color='b',node_size=20,with_labels=False) plt.savefig("four_grids.png") plt.show() networkx-1.8.1/examples/drawing/circular_tree.py0000664000175000017500000000067212177456333021750 0ustar aricaric00000000000000import networkx as nx import matplotlib.pyplot as plt try: from networkx import graphviz_layout except ImportError: raise ImportError("This example needs Graphviz and either PyGraphviz or Pydot") G=nx.balanced_tree(3,5) pos=nx.graphviz_layout(G,prog='twopi',args='') plt.figure(figsize=(8,8)) nx.draw(G,pos,node_size=20,alpha=0.5,node_color="blue", with_labels=False) plt.axis('equal') plt.savefig('circular_tree.png') plt.show() networkx-1.8.1/examples/drawing/ego_graph.py0000664000175000017500000000167512177456333021064 0ustar aricaric00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ Example using the NetworkX ego_graph() function to return the main egonet of the largest hub in a Barabási-Albert network. """ __author__="""Drew Conway (drew.conway@nyu.edu)""" from operator import itemgetter import networkx as nx import matplotlib.pyplot as plt if __name__ == '__main__': # Create a BA model graph n=1000 m=2 G=nx.generators.barabasi_albert_graph(n,m) # find node with largest degree node_and_degree=G.degree() (largest_hub,degree)=sorted(node_and_degree.items(),key=itemgetter(1))[-1] # Create ego graph of main hub hub_ego=nx.ego_graph(G,largest_hub) # Draw graph pos=nx.spring_layout(hub_ego) nx.draw(hub_ego,pos,node_color='b',node_size=50,with_labels=False) # Draw ego as large and red nx.draw_networkx_nodes(hub_ego,pos,nodelist=[largest_hub],node_size=300,node_color='r') plt.savefig('ego_graph.png') plt.show() networkx-1.8.1/examples/drawing/simple_path.py0000664000175000017500000000043712177456333021431 0ustar aricaric00000000000000#!/usr/bin/env python """ Draw a graph with matplotlib. You must have matplotlib for this to work. """ try: import matplotlib.pyplot as plt except: raise import networkx as nx G=nx.path_graph(8) nx.draw(G) plt.savefig("simple_path.png") # save as png plt.show() # display networkx-1.8.1/examples/drawing/random_geometric_graph.py0000664000175000017500000000147412177456333023625 0ustar aricaric00000000000000import networkx as nx import matplotlib.pyplot as plt G=nx.random_geometric_graph(200,0.125) # position is stored as node attribute data for random_geometric_graph pos=nx.get_node_attributes(G,'pos') # find node near center (0.5,0.5) dmin=1 ncenter=0 for n in pos: x,y=pos[n] d=(x-0.5)**2+(y-0.5)**2 if d # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx def miles_graph(): """ Return the cites example graph in miles_dat.txt from the Stanford GraphBase. 
""" # open file miles_dat.txt.gz (or miles_dat.txt) import gzip fh = gzip.open('knuth_miles.txt.gz','r') G=nx.Graph() G.position={} G.population={} cities=[] for line in fh.readlines(): line = line.decode() if line.startswith("*"): # skip comments continue numfind=re.compile("^\d+") if numfind.match(line): # this line is distances dist=line.split() for d in dist: G.add_edge(city,cities[i],weight=int(d)) i=i+1 else: # this line is a city, position, population i=1 (city,coordpop)=line.split("[") cities.insert(0,city) (coord,pop)=coordpop.split("]") (y,x)=coord.split(",") G.add_node(city) # assign position - flip x axis for matplotlib, shift origin G.position[city]=(-int(x)+7500,int(y)-3000) G.population[city]=float(pop)/1000.0 return G if __name__ == '__main__': import networkx as nx import re import sys G=miles_graph() print("Loaded miles_dat.txt containing 128 cities.") print("digraph has %d nodes with %d edges"\ %(nx.number_of_nodes(G),nx.number_of_edges(G))) # make new graph of cites, edge if less then 300 miles between them H=nx.Graph() for v in G: H.add_node(v) for (u,v,d) in G.edges(data=True): if d['weight'] < 300: H.add_edge(u,v) # draw with matplotlib/pylab try: import matplotlib.pyplot as plt plt.figure(figsize=(8,8)) # with nodes colored by degree sized by population node_color=[float(H.degree(v)) for v in H] nx.draw(H,G.position, node_size=[G.population[v] for v in H], node_color=node_color, with_labels=False) # scale the axes equally plt.xlim(-5000,500) plt.ylim(-2000,3500) plt.savefig("knuth_miles.png") except: pass networkx-1.8.1/examples/graph/0000775000175000017500000000000012177457361016216 5ustar aricaric00000000000000networkx-1.8.1/examples/graph/roget.py0000664000175000017500000000462612177456333017716 0ustar aricaric00000000000000#!/usr/bin/env python """ Build a directed graph of 1022 categories and 5075 cross-references as defined in the 1879 version of Roget's Thesaurus contained in the datafile roget_dat.txt. This example is described in Section 1.2 in Knuth's book [1,2]. Note that one of the 5075 cross references is a self loop yet it is included in the graph built here because the standard networkx DiGraph class allows self loops. (cf. 400pungency:400 401 403 405). References. ---------- [1] Donald E. Knuth, "The Stanford GraphBase: A Platform for Combinatorial Computing", ACM Press, New York, 1993. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html """ from __future__ import print_function __author__ = """Brendt Wohlberg\nAric Hagberg (hagberg@lanl.gov)""" __date__ = "$Date: 2005-04-01 07:56:22 -0700 (Fri, 01 Apr 2005) $" __credits__ = """""" __revision__ = "" # Copyright (C) 2004 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx import * import re import sys def roget_graph(): """ Return the thesaurus graph from the roget.dat example in the Stanford Graph Base. 
""" # open file roget_dat.txt.gz (or roget_dat.txt) import gzip fh=gzip.open('roget_dat.txt.gz','r') G=DiGraph() for line in fh.readlines(): line = line.decode() if line.startswith("*"): # skip comments continue if line.startswith(" "): # this is a continuation line, append line=oldline+line if line.endswith("\\\n"): # continuation line, buffer, goto next oldline=line.strip("\\\n") continue (headname,tails)=line.split(":") # head numfind=re.compile("^\d+") # re to find the number of this word head=numfind.findall(headname)[0] # get the number G.add_node(head) for tail in tails.split(): if head==tail: print("skipping self loop",head,tail, file=sys.stderr) G.add_edge(head,tail) return G if __name__ == '__main__': from networkx import * G=roget_graph() print("Loaded roget_dat.txt containing 1022 categories.") print("digraph has %d nodes with %d edges"\ %(number_of_nodes(G),number_of_edges(G))) UG=G.to_undirected() print(number_connected_components(UG),"connected components") networkx-1.8.1/examples/graph/expected_degree_sequence.py0000664000175000017500000000136412177456333023576 0ustar aricaric00000000000000#!/usr/bin/env python """ Random graph from given degree sequence. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx import * from networkx.generators.degree_seq import * # make a random graph of 500 nodes with expected degrees of 50 n=500 # n nodes p=0.1 w=[p*n for i in range(n)] # w = p*n for all nodes G=expected_degree_graph(w) # configuration model print("Degree histogram") print("degree (#nodes) ****") dh=degree_histogram(G) low=min(degree(G)) for i in range(low,len(dh)): bar=''.join(dh[i]*['*']) print("%2s (%2s) %s"%(i,dh[i],bar)) networkx-1.8.1/examples/graph/karate_club.py0000664000175000017500000000062512177456333021045 0ustar aricaric00000000000000#!/usr/bin/env python """ Zachary's Karate Club graph Data file from: http://vlado.fmf.uni-lj.si/pub/networks/data/Ucinet/UciData.htm Reference: Zachary W. (1977). An information flow model for conflict and fission in small groups. Journal of Anthropological Research, 33, 452-473. """ import networkx as nx G=nx.karate_club_graph() print("Node Degree") for v in G: print('%s %s' % (v,G.degree(v))) networkx-1.8.1/examples/graph/words.py0000664000175000017500000000542712177456333017734 0ustar aricaric00000000000000""" Words/Ladder Graph ------------------ Generate an undirected graph over the 5757 5-letter words in the datafile words_dat.txt.gz. Two words are connected by an edge if they differ in one letter, resulting in 14,135 edges. This example is described in Section 1.1 in Knuth's book [1]_,[2]_. References ---------- .. [1] Donald E. Knuth, "The Stanford GraphBase: A Platform for Combinatorial Computing", ACM Press, New York, 1993. .. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html """ __author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)', 'Brendt Wohlberg', 'hughdbrown@yahoo.com']) # Copyright (C) 2004-2010 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
import networkx as nx #------------------------------------------------------------------- # The Words/Ladder graph of Section 1.1 #------------------------------------------------------------------- def generate_graph(words): from string import ascii_lowercase as lowercase G = nx.Graph(name="words") lookup = dict((c,lowercase.index(c)) for c in lowercase) def edit_distance_one(word): for i in range(len(word)): left, c, right = word[0:i], word[i], word[i+1:] j = lookup[c] # lowercase.index(c) for cc in lowercase[j+1:]: yield left + cc + right candgen = ((word, cand) for word in sorted(words) for cand in edit_distance_one(word) if cand in words) G.add_nodes_from(words) for word, cand in candgen: G.add_edge(word, cand) return G def words_graph(): """Return the words example graph from the Stanford GraphBase""" import gzip fh=gzip.open('words_dat.txt.gz','r') words=set() for line in fh.readlines(): line = line.decode() if line.startswith('*'): continue w=str(line[0:5]) words.add(w) return generate_graph(words) if __name__ == '__main__': from networkx import * G=words_graph() print("Loaded words_dat.txt containing 5757 five-letter English words.") print("Two words are connected if they differ in one letter.") print("Graph has %d nodes with %d edges" %(number_of_nodes(G),number_of_edges(G))) print("%d connected components" % number_connected_components(G)) for (source,target) in [('chaos','order'), ('nodes','graph'), ('pound','marks')]: print("Shortest path between %s and %s is"%(source,target)) try: sp=shortest_path(G, source, target) for n in sp: print(n) except nx.NetworkXNoPath: print("None") networkx-1.8.1/examples/graph/unix_email.py0000664000175000017500000000517312177456333020726 0ustar aricaric00000000000000#!/usr/bin/env python """ Create a directed graph, allowing multiple edges and self loops, from a unix mailbox. The nodes are email addresses with links that point from the sender to the recievers. The edge data is a Python email.Message object which contains all of the email message data. This example shows the power of XDiGraph to hold edge data of arbitrary Python objects (in this case a list of email messages). By default, load the sample unix email mailbox called "unix_email.mbox". You can load your own mailbox by naming it on the command line, eg python unixemail.py /var/spool/mail/username """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2005 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
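# How the graph is built below: each message in the mailbox contributes one
# edge from the sender's address to every recipient address found in the
# To, Cc, Resent-To and Resent-Cc headers, and the parsed email.Message
# object is stored in the 'message' edge attribute.  A MultiDiGraph is used
# so that repeated correspondence between the same pair of addresses
# accumulates as parallel edges instead of overwriting a single edge.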
import email from email.utils import getaddresses,parseaddr import mailbox import sys # unix mailbox recipe # see http://www.python.org/doc/current/lib/module-mailbox.html def msgfactory(fp): try: return email.message_from_file(fp) except email.Errors.MessageParseError: # Don't return None since that will stop the mailbox iterator return '' if __name__ == '__main__': import networkx as nx try: import matplotlib.pyplot as plt except: pass if len(sys.argv)==1: filePath = "unix_email.mbox" else: filePath = sys.argv[1] mbox = mailbox.mbox(filePath, msgfactory) # parse unix mailbox G=nx.MultiDiGraph() # create empty graph # parse each messages and build graph for msg in mbox: # msg is python email.Message.Message object (source_name,source_addr) = parseaddr(msg['From']) # sender # get all recipients # see http://www.python.org/doc/current/lib/module-email.Utils.html tos = msg.get_all('to', []) ccs = msg.get_all('cc', []) resent_tos = msg.get_all('resent-to', []) resent_ccs = msg.get_all('resent-cc', []) all_recipients = getaddresses(tos + ccs + resent_tos + resent_ccs) # now add the edges for this mail message for (target_name,target_addr) in all_recipients: G.add_edge(source_addr,target_addr,message=msg) # print edges with message subject for (u,v,d) in G.edges_iter(data=True): print("From: %s To: %s Subject: %s"%(u,v,d['message']["Subject"])) try: # draw pos=nx.spring_layout(G,iterations=10) nx.draw(G,pos,node_size=0,alpha=0.4,edge_color='r',font_size=16) plt.savefig("unix_email.png") plt.show() except: # matplotlib not available pass networkx-1.8.1/examples/graph/roget_dat.txt.gz0000664000175000017500000003661612177456333021360 0ustar aricaric00000000000000‹I®Fm}Ë’ëÊrÝœ_AkbI¡{/ õ¦g–mM ‡mExàšD7qIðòѽ·¾Þk­,€½4àÞM YYYYùæßoÿ÷qºoß§Ó¸½Íããÿ†ÇŸ?[<½·Çv~ßþóøøšo¿ÿŸíp9ðùaº?nÓÛó1¶›¿ß~Mãöq·÷áÏËa¼©Çc¼ïœ‚_þéŸÿeûßÇûïþi¼Œ·á´ýÏ7Œ·ýï6&†9>×Ý_þòõõõçËóÏóíã/ûùúó4¾?þrRß?çZ¾Ö…ÿ/óc] §ú_áò>ßÛº ×ãîãÒó WpèoãxÁ ûãpùÄy;|ÎÓø¹ÝÏ—÷ç}š/¶rΙnóC ¼>Æýï¹îÿFþF¨ý3Pû7KÆíßþãßm]­þõî_.Óçx»OŸëoóýþ§Ûø>ÞÆË“N—íÿäÈÿñŽåŽ÷áy{ÞÿaëJ®¿ à<üÄ‚0ó8žþuÂ’°Ûëiä´‡Y²Ån§ÇåB¿†Ÿÿýí?Çýï÷ç™(‚°i÷­ë¼ÿ‡B­1úúw7þÀ¬]¿Muëú¸uÿÇ´u ¬*ĸ]ÚÆ>oúéòêá¶2õø„­O ªñ6g·ñ÷çÛhyLÃ èØ¡Uïñ‰›ð¼üជ6P· ¾Ç`}0oât%^îÓÞ§mÁ¬ üß÷h„¿SÌ›4þø¥aÜb´#dÌõweÛ‡¼õ>aè¼)ûé¶ž V”¶™KßÔÛxH ;×mZ0͸ h?º° 9l\7ÝÖ–u‹ecä’¶µÛ8z»cgžÓ…P8àÂõûùÕÞE¿q~:ŒX?Má·=°ßb|ãy܆Û4²ñ†•ÄmîÑ5¦÷FR;§É}·-%Ì7.=/¨ñlÓú‚§ù2_þôýMb§Âív—ûtžN˜”o±vW[ÎÐqŒÛ6z”Æ ¾ò½šd.xâ¨!nú¯¶êÞqä…ÎÛäܦw„è[‹nÓ÷Ÿ¬¡)kE=ÆêkÙôžŒcGTœ`àÄ,Púp½Íùñó jö›>8;g ›Ã5b ø-€‹Ž]·MXuÆ8¹¯›>a)¯>Nëż@7>˜;ÿõ9ØnÞpâ7}9Œl¿ëIQÛÀ¯#Šøˆ<ö&ú°ñÐú2—iãÝy.»ì¶)xÐ1ð*ò ˜óL´¡­±!?s¶Úmkî7Þ„áqÝáq<àƒHÞfÁï:~€‚,¨9§-L³ÅÀS³-%o|¸Ÿ‡Óɱ³D06 R «ïxÄ"ÁÛøx^ÇÛ4k×Ù6.‘дícÝDê·w¹Á×›æv~›.F•„ž0lb,€^ç»a}8袪6ÑçÓ¸YqÖN,;Þò$‡`”[É=\a€q1rà.1!ŒmR£]î a!þÞŒ;‹MÇ8!wF½d<¥² >qów 1o¼ Ê|&skkÉ¢ºn«º\¸ß¢Cë¸I¯žá2ÎÏ»æOm(Þ>ÉA0õ<©!÷ä.øM걟ö–WÆs­ /ï0O /Áj°y×TòÃíÆÛ_„‡ö‘Ý@ȉ4ZÊ& º6àÈš]ã%¯·q?´Ãž˜Ùò:ïÉcÈ]A&%Û¥šî`[ÖŽˆ'ôPô<ïœ(s¤ç ËØáLpˆH>Î*8ŠºŸv ¸T߯ér™.øºF¯<0HyMìÉ›A ‡/wãå°£t‚]°½„ nzvçépQyuÕ Ýäžwçty’Ñd|÷@ò·GŠk³uÀ i=79P¶{ßä8@.=¿†‘ì-íkßÌŽ÷Ƭµ«›Ì»ãJ™Ïˆ…œ×ñÆùæü>ïŸ÷´6¹ìO˜WÙ&WîIÂéTÆ›èÇ­P@Që ²šMq&d‹êtAþI °Òcþ}¥07i§ô›âoO`ÁkÞ$¹DÀÁJK8?OU(~S"Åä—h€n¼=GÓ$*¾µÀx òò(ùò<¿„7¥à/€¨ãTÈÅð¶ž 5î@Ÿ›Ú=%IK@eu:SÕ {žªá2‰ZÉÿˆ[ʤ›ÚÚ¥‰Û¡úÃó £É]&$ˆ<5¼MwâÕ.B´^›šð÷·NyS3ž¬­!óÜËØý‹µ¯øv¸}›ÇuêºN_ý*ž¹wHûÛù¶=Ï`ú®ëñ¬ÿ+?|kÈ·ï¸úox A°ó×Ó³m"WØq ”­ Ouáý6Øèº~!‘uñ_ÇÛ¼Ó7 Èx’´sçaÜåh—4nxpD¼Ëïã×" èî„¨ÜÆëø˜šŒ…‡NÔ …g4Ab\ÑMhu‚l÷À†—:Jƒ…)«R΃hã°M rÞ]Fè<š}ð —xpØ©9N¤sÞÎ(ø±=<…à5^AÒ÷ٵ͕öœrî·i«¡D`¥3p‘Ò!Vè™ï}Âè ŠD õ dŠÄ sO²xZ 4Po‹WWëR÷ÇÛ|™¡FÝð 2*>ÃeÐÓé~Fk²'>v×E¼/í°Y¼4Rrï{Ü6U jüÒ‘ÁƒWºÞƒ—Þ‰#!^öÀQ‰y£ÁâýçåDIïÄšÒæw“f쬞 ›0Ø´ùåZrcÈ¡‘›ß—K£%cÒ½ÑH_çÓÁ®¸jëcGo²X9‚ø»ó@¬w ŒöòýÏùù8â õOöºŒíÀ ¿ò·¾ÅO‚ ÑØ‘š©Eù4 EÜ÷vaW=·ßçq¸¦¶õÅn^…wn“ÚöÒh`& 
™ç±è‘ÜÀ–ø`½ïÏÑNðЉd3_.õËžâ¡á´TÄé*íO.½[&t \—–ŸbÞÓl2´u/ ©4Ð2î§o²·$Iõ”Ý"{×6e½?O§VP†–«W ¼íùëf¸ ìw*…@Õ»3‹€’ßë7Gg…ty¼êAëÓã YT¬²HÑç|åÚ×ðJø\~È!YàžáÍ5cüT¡­çÕÇ3”ð¥‹j¢•¦oÈÏÔ¿ˆÐ3ô ÁŠ“>±û’TË+‹<ÿ?ŽüaÊåÿz]V¼‡ÐoËõ8 ÛoüAO J¥TðT2¯Wš³ˆETßqu,F»‰Ÿ"<¤–ÊŸšNÛÃÌâ…íg;èc­ÔK-‘Øñ EiÆx´”¶àÜ{¼›Ö{[<@dÓÛ>zæÆ–(éWI³ö3\Õò‚>sJ¿?Çyõ,Ãý±üä‡\Ä?•PþkuÒÇÕ¼Jµ®¾þ6Šø£d´ò©?ÜMyÏæ’èÍ-bsû†fÅšBÿuÃÉÚU5½ g+ü©4håŠÌLk6’µcª û‡çemH^ljÖ1âbp’C!• lKX|Ð$±0Äi!Á¹FÖᵪ}±¼ÒMü±ÒÁ©2t@M(L"aY™v©˜Â‹½°wédK;é¤Ù°”œèUvKm;_»U+ï-FPt ¿-Á_~ é´SÚEùËr½e¥¯­Y\]êu¦&êT’§€íÚÞË…W€=ˈ?¬F‡iy(ãÛ–¢µ6ÂrÓ€èbl½@艦ÖÇBL ²nùó[¾¡U€iiÔy.?îDb3,1оØ5 )Ó¼)bÆ’ #ÜŠŠ¿©³Ê-Ö°¥‡ò¢+&ÔøUav5¹ß/“ÈÁ¹h0U?ƒ©„%ˆ­ŽU›­'ŸpŸÞ§&Áßär>§ÛƒÑ1Œ®X`´) £‹ñ·¢O"Ù_CnPàð^f+?né­ùuª ìAwzN'¤zdž"…Šå(Ùkélfã?¾¾ Ö†<ŠüIV*11ׯ*W厥ÙM(qæþ¡´EzRî VÃK—f´0àñð˜/-¸’M¼~Ÿ¬Åµ9ûáTŇf@”Cûý2{/$åHSËAá­…O‹ÓH?ï$;0u7ógó{“¬t?C´Ççãô|ø» •,P ³[¥:²{óäŠEš Ÿé´¶b¨bTˆ‰ñº’~Ã<÷å"£w¡2U¶`I%ëg¶ =ïèïÄ3Öϱü c–k,õ4|ñWOÔ­vø¦46THÉ!ò•¹` I/’¸\åÏËÆóefs;œíèV}^–Ü jÈ‹I†eŪ¬^NÐ̾4ùþò6‚^†i›¼W’Õx¥ÜåÃØ~ ³0pk¯ØŸZï [À—F©ø§ûíydùoœêñ«~6³?Ž8ðæVåÿGU„Ôfváqli¶ÁB^ù[˜]œoÌ™Øãؽ^¥#ƒ€—wKá·&Ÿ·ê>’Íå”åŠ?ØIOÇçò;ìQ?jšÿ\ïãó0ÿéÛk¶Vg¾¯W#^q»ud.À‘굊4;„Ľ>µ“]×K‹8rN??}˜™/‹àp_³Ý„–ïWšLÍ_ìëø“ñ3`£_ váNéõ\zËaO½†‰¸`þ™¬‰•¸X6Íåý‘ŽúÞ³"ĺ²?Ù/Œu./(s kË!à¯K дúAè3Xý¬ü'³Û¸ØYWUèõ«¨=9 4ÍÍßoÿëåÀô°w\Û¿¹Íããχáñ7›ÿɳEqÜ‚networkx-1.8.1/examples/graph/atlas2.py0000664000175000017500000000167112177456333017761 0ustar aricaric00000000000000#!/usr/bin/env python """ Write first 20 graphs from the graph atlas as graphviz dot files Gn.dot where n=0,19. Requires pygraphviz and graphviz. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __date__ = "$Date: 2005-05-19 14:23:02 -0600 (Thu, 19 May 2005) $" __credits__ = """""" __revision__ = "" # Copyright (C) 2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as NX from networkx.generators.atlas import * from pygraphviz import * atlas=graph_atlas_g()[0:20] for G in atlas: print("graph %s has %d nodes with %d edges" %(G.name,NX.number_of_nodes(G),NX.number_of_edges(G))) A=NX.to_agraph(G) A.graph_attr['label']=G.name # set default node attributes A.node_attr['color']='red' A.node_attr['style']='filled' A.node_attr['shape']='circle' A.write(G.name+'.dot') networkx-1.8.1/examples/graph/atlas.py0000664000175000017500000000511512177456333017674 0ustar aricaric00000000000000#!/usr/bin/env python """ Atlas of all graphs of 6 nodes or less. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx #from networkx import * #from networkx.generators.atlas import * from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic as isomorphic import random def atlas6(): """ Return the atlas of all connected graphs of 6 nodes or less. Attempt to check for isomorphisms and remove. 
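    The duplicate filter below relies on graph_could_be_isomorphic(), a fast
    necessary-condition check on graph invariants rather than a full
    isomorphism test, so a few isomorphic duplicates may survive in the
    returned union graph.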
""" Atlas=nx.graph_atlas_g()[0:208] # 208 # remove isolated nodes, only connected graphs are left U=nx.Graph() # graph for union of all graphs in atlas for G in Atlas: zerodegree=[n for n in G if G.degree(n)==0] for n in zerodegree: G.remove_node(n) U=nx.disjoint_union(U,G) # list of graphs of all connected components C=nx.connected_component_subgraphs(U) UU=nx.Graph() # do quick isomorphic-like check, not a true isomorphism checker nlist=[] # list of nonisomorphic graphs for G in C: # check against all nonisomorphic graphs so far if not iso(G,nlist): nlist.append(G) UU=nx.disjoint_union(UU,G) # union the nonisomorphic graphs return UU def iso(G1, glist): """Quick and dirty nonisomorphism checker used to check isomorphisms.""" for G2 in glist: if isomorphic(G1,G2): return True return False if __name__ == '__main__': import networkx as nx G=atlas6() print("graph has %d nodes with %d edges"\ %(nx.number_of_nodes(G),nx.number_of_edges(G))) print(nx.number_connected_components(G),"connected components") try: from networkx import graphviz_layout except ImportError: raise ImportError("This example needs Graphviz and either PyGraphviz or Pydot") import matplotlib.pyplot as plt plt.figure(1,figsize=(8,8)) # layout graphs with positions using graphviz neato pos=nx.graphviz_layout(G,prog="neato") # color nodes the same in each connected subgraph C=nx.connected_component_subgraphs(G) for g in C: c=[random.random()]*nx.number_of_nodes(g) # random color... nx.draw(g, pos, node_size=40, node_color=c, vmin=0.0, vmax=1.0, with_labels=False ) plt.savefig("atlas.png",dpi=75) networkx-1.8.1/examples/graph/erdos_renyi.py0000664000175000017500000000163312177456333021113 0ustar aricaric00000000000000# -*- coding: utf-8 -*- #!/usr/bin/env python """ Create an G{n,m} random graph with n nodes and m edges and report some properties. This graph is sometimes called the ErdÅ‘s-Rényi graph but is different from G{n,p} or binomial_graph which is also sometimes called the ErdÅ‘s-Rényi graph. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __credits__ = """""" # Copyright (C) 2004-2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. 
from networkx import * import sys n=10 # 10 nodes m=20 # 20 edges G=gnm_random_graph(n,m) # some properties print("node degree clustering") for v in nodes(G): print('%s %d %f' % (v,degree(G,v),clustering(G,v))) # print the adjacency list to terminal try: write_adjlist(G,sys.stdout) except TypeError: # Python 3.x write_adjlist(G,sys.stdout.buffer) networkx-1.8.1/examples/graph/knuth_miles.txt.gz0000664000175000017500000004753512177456333021734 0ustar aricaric00000000000000‹I®Fm½ÛreÇ‘¦y¿žbO]u—aT+"Ö!¢çJ¢ªªÕ¥“‰”Ô5´mcP&È„h ¤X|ûöïóØ©RÙ\({¯C„ÿÝã/_}x|½|óøôpù>þyýÿÞß¿ýìí?Þ.ñÛî?¾]^¾¹üöáíÇ—ßýïËýó{~ÿþñõíãã_>½=¼¿,ÿxùññíÃåíÃÃåõþû‡ËÓ㻇çׇËýëçïý,>ôËÿôOÏï>ú·‡ß¿r þã_ûÇ˯^_ãoÿúðüðñþéòûO‰ë]~׌Ë|x{ûáüÓ?ýøã?ûöùÓÏ^>~ûOï^~øééá›·zò»?ûðöýS|òoïÿÿüòöùm¸Õ—o÷Ïß¼||ù×÷?|øÅýëÃÿãïŸyƒñèyxxŽ‹¼ûpÿüm<ñÛËåþ¯/ïc~º¼{yþæÓëãËs¾9÷ˆ;}|ù>.qÿ~zzx÷ç½÷ññÛÇçx•û·|ÿøýÿˆßÿ Oö.ùÏbÉÿávÿ¿g»ü·/þû¥ŒÑþö·?>?þõáãëãÛOq­?<üõñ5ž’«Ýûp‰ëÝ_âc—ß~øñþ§×Ëã³—Žo±þq•·xdvó‹ûçû÷÷wqõmüÝ¢}ÿS,B<ÕÃïüÃc|1ÿòCÜ#éý‹‹š ty|ã,NÜîÿŠ ý·/><¼ûîõÓ÷,{,jlôëåÇ]ûˆ7ÙÊ_þýåÓó·¯o/?>ß]~÷?¿ÞJYïúzì×Rö­Ë¿ß?÷öüò—_oµ÷»q¶q-u-eÿîñûxö?ÿüëí8Ö»øË^®ÛèõXÊ^Ú¥ne]þüòñÝÃkÜÿîò›ø`­çÝYúz-G9ÇXê8¶KÙëz9Ömùóãkìîk¼Ë/žž^ãÚ¿ú:¥Ýõqž×º×²”²Kéå¼l½\ö±Ç—žã-žÿï/¿»üö‹¯Ûá«Ôx•Vzß—QÏËYÇ¥ž[½”ÚÇeÜíùùñ‡‡oãÉ~ñu<7oXöë~lÛÙ–xÀr9ú7kíR¶í¸ìûß>ß|÷a¾ÕŸ~þueܽ׺Öròö륎õKq‰µ¹Ôc+—RÆy©{,ÉãÓ÷Ïߺ¶<íV·»ó5žvÔÖ—ß,½Æ–X›øåå<ö¸Þϲ¯±Ríü»küòŸã ÎýîÜ÷ýz®%Ö„›–£ÇÿJ¿lm½Ä–Ç\X¾¶î—¶±tOO¯ù ±É½ìwemOÒBâ!â=ÖX¼r–z !`åÛ¥÷¿nû¥¬G¬ÑÁòô’W»ÿþõ‡—ow—ßÇn—Ou®ëµµm½‹KÕË~ÄJ¬±${‰åÒ[½´c°Pq³¸ö¥¶ñù‚<ߟÿôu;~×kíׄ±ìëÆû­—}Û.Û8ãêÛ϶­±ÝÛ¥oq=Ö¯¬g¹´½Åß}x íü—{Åë«ÿýu‹Õ¾}×±Už1Ä2—9d'^l?ã.ÍWlÈÎÎ?! e»–¾±$ýôv!XÛíw—û’hÍv ¡ém_Z<\YW.q®¬^ügÛ¼’«=ø¯2êåÜæî•3ž`+Üq˃ï‚lëz†œŸõ[|®KY «KXYË}¿¨‡-6p™gY㥊W;â—GˆAÝB%Æè—P“?‡H_~ÿôýå÷ï>Ü]þå×_×ã¬qu¿-Ä&Ö'ö®¨ ,ÈÉŒ;ÆÅÛ?•x7yÇUØCtWž"þ¶í#¥Çžcs%ã†Ï÷o¡IiEB;±"-äï¬û¹´Ð¯ô†‘do;Z4¾A©FˆBåCY6~Ç7Ô™Û¢lGYjÛ{ÜòáýÝå „Ó»Å寵‡N5$¨b$ä¨c):÷?ãûa2­3äi„Ùi±Õñ;Þ¥ïñÓ]¨Õ¥Žv–é A©ÇB|ÿÓ»/¯!mÿ–"n"ÜÂ2vÆ#Í‹¯$usóË:rw|±5V¯Ç [£“= ==Œ [²שnHÈüà–Ÿ^ï?¥õÜÂè÷0²×V7 3ºÚ£Æ—ô3¤²k ZüÀ]Ñqä;ÇÇ6¯ñÆ_>}ü)Ôì«P³=|@Üðž¥FØŽRu¶¤_×R×xãV"H½bÍv®¿ë~â!x×Ïo±KX&ß(‰ðB¥ UêcÅ%ÅîÖ02÷¯>»Êp·½œRˆf Q—Æ‚áÞÜ÷УybÈ—Â}cõ#º‰Ÿ:fkFÚõ€óÆcê44!Xñ!ÒÉÞ"úm ÿÿñãÃóô=Vg”íÊêañÝì‰ýEåvGд"T±§SZ¸+WÕÛµñÃYCNBqØc\såZ,éÿ!`øÛ¸Ân üO,maÇžžî/þ;ùð+…¸'b®ðA¡¾Z³-^·s¹sÕÅ”é½F<µ¾=Ü\ì’ÂÔPø!>ìdB¾.a©§ÔR‡ùâÇ#Çcr]¾qjVS·2 @Í1îß½¤ÛF¬BÏcáÖXØP2%‰çŒ­QwÆe°BZÌuãŸx¸W­b?Õ¶,ÞL=¬‡†¡® Éß± :ï iÜÒ,²x<]‹]׎9¹X_þñáÃóóCý_ý6$ޏåÜc5×ЦÅχŸÓöñg[¬N/DB‡Ú±Çrž¡Z¡Á'ž”§îÄ`®{Á$âq“[bÄC:;>]‚Bà$0W‡ÙE¨âÏøÁ?=¾{{ùøxïB†¶•Xȵ\÷õˆq‹50øDf}ó]‡ÈO%ƒÔžÛ"€)ÅfÅŸãyU#V4y9ÁKay‰gzI቟ÎÔñØ€‚Ù#*ãfÃÓùÈ¡Ñ\(Üs<ôw¯aUˆÖ#² i¸kï! 
ñ2¸Ó¿wGŒìÛ$]Ûj§Ëžn'9öyN»,mÁ#RéêIÄçA H°Ëqá¶DÉÂe¶åø2q¯f Ù˜óðÏ/œ§qk8WÏøÅ&쎟@|ÝfóS2¹O+OÂÝ™»-o²œjË6Zúu´†Öœé¹±È †Ãcç°p«$=»ˆÒä˜XJµï UÉ#Pöyêáø©>g8ßC¨Ç.Oñ´éêÉdÛf9 vg‡º œ}Æ`üà´dˆ;J_1´M·§QÏɼß’^$Ò ©S=W`»übñKók¤iE¯¥ë1­ÍѤ¶ÊJKs:Ð]3žÎƒ=VLê$ˆŠÆN°ÎY²)s\FHÚ·¶%}ùo_‡©áüº#‰ «‹ö6`ë,n…/ Fà2- ­èâXO0¬žf&^<ôÄc9s²è:ékN¬ß­ÇÄŒ²qÒCÖÌ`:êÀ.…9ûÏ€-ºÉÅ?6¶îyˆñ1 ä4‘Å$¶÷Lù屨’d#v¨Ë¢§ÖŸ²ybà<Ê]Z€6ö0É’ÜS³$å-{ìç‘Z‹£Q'ãk/Ùm!J-×=!ÏœW™s÷„~²“ø˜èt÷ìÍ,YP|;’`ga±3kÝ3eÈcK]Ï…/î3˜—Ÿ‘‹Çý8ªHs:C{Ë»›ð÷žƒÃJŽ˜÷l Ò°ˆ,–[Ë ÷· ½g¯ê[§ ¯å†<øH¡!sï/¿È©Å_pÌkqšOÝ®cã`Ϧòäìk.-iXÆ7šÈá¯è“㦶ËýXs²ˆó—œm3rþ¢á «b¢·ÊQiž¶žÞ™}9ÜÜäÁ最ÃS{’˵º…k ¢fÂÜF*¤T «ñI—)Y öÜñÁI.NÐsvŽL11M'ÌŠVƒy޻Ʉ…sQj¼» ]!o‡ì¸<,´gónµé²'ÑìL\ÌDoˆˆ‘’Ÿdûù8æ Cg¨ PacSƒ"û'›#FðX–hó4W=]B±™€gs¤Š!ºÌ°ðÝË‘C¥¨À»8gÕ·|z{Û2­È‹_³i­9jXRŸÄ‰s‹`L³Ùä. æ6—Ü.Ò2Dt¶)qò!Ž0cpŽS½ïYž-ÜÚ]hVcö‹¨éPÄŽœU0Oî ¡"hj2†NÙL“à°õRoø+|Ôy®½ÝέäÀj^KF”èìç4,‘”z‘3Ò <÷p¨Ç1ç³› É´l«!u ›¬”‘ÇR7ZœÛÌš™çW+(göã8Âl8S9G(˜8)„@ËI6ʇŒ<«`§%ÊfÉ5:x)¡ŸHZËlG6è‘è{ñÔ¢<¤±:[w1ô—(iºgëŸ=ò›>8åÍþ’ÌRüÉøð°±“Q”ÎT%Ü2¼aXgßEpî¹³&íÇCp"8ËãiªìŽê)™åv' ál3€Ý7‹ºä¹5vdÚý_žñŸŽ±*Ç]ìË&yô\Z&e"öQ›É.D£æl¢>Y|u—Ì£ž„ÃSÈañ(æ2)Ôu•œrbåÁì™›ÿ˜&H%_ÇÌÕÜŒ“@KpR[ [žIj‹w‡p›àÉ!Ï<-QrjDZö%ÇçÃ-ÐYIo€˜Jˆlh8›þvÛ®vivÚ;[v[þñòÏÏï//ß\¾y|z¸üÃ÷ñïëÏÞß¿ýÃòkèV;Ø£networkx-1.8.1/examples/graph/napoleon_russian_campaign.py0000664000175000017500000000622012177456333024004 0ustar aricaric00000000000000#!/usr/bin/env python """ Minard's data from Napoleon's 1812-1813 Russian Campaign. http://www.math.yorku.ca/SCS/Gallery/minard/minard.txt """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import string import networkx as nx def minard_graph(): data1="""\ 24.0,54.9,340000,A,1 24.5,55.0,340000,A,1 25.5,54.5,340000,A,1 26.0,54.7,320000,A,1 27.0,54.8,300000,A,1 28.0,54.9,280000,A,1 28.5,55.0,240000,A,1 29.0,55.1,210000,A,1 30.0,55.2,180000,A,1 30.3,55.3,175000,A,1 32.0,54.8,145000,A,1 33.2,54.9,140000,A,1 34.4,55.5,127100,A,1 35.5,55.4,100000,A,1 36.0,55.5,100000,A,1 37.6,55.8,100000,A,1 37.7,55.7,100000,R,1 37.5,55.7,98000,R,1 37.0,55.0,97000,R,1 36.8,55.0,96000,R,1 35.4,55.3,87000,R,1 34.3,55.2,55000,R,1 33.3,54.8,37000,R,1 32.0,54.6,24000,R,1 30.4,54.4,20000,R,1 29.2,54.3,20000,R,1 28.5,54.2,20000,R,1 28.3,54.3,20000,R,1 27.5,54.5,20000,R,1 26.8,54.3,12000,R,1 26.4,54.4,14000,R,1 25.0,54.4,8000,R,1 24.4,54.4,4000,R,1 24.2,54.4,4000,R,1 24.1,54.4,4000,R,1""" data2="""\ 24.0,55.1,60000,A,2 24.5,55.2,60000,A,2 25.5,54.7,60000,A,2 26.6,55.7,40000,A,2 27.4,55.6,33000,A,2 28.7,55.5,33000,R,2 29.2,54.2,30000,R,2 28.5,54.1,30000,R,2 28.3,54.2,28000,R,2""" data3="""\ 24.0,55.2,22000,A,3 24.5,55.3,22000,A,3 24.6,55.8,6000,A,3 24.6,55.8,6000,R,3 24.2,54.4,6000,R,3 24.1,54.4,6000,R,3""" cities="""\ 24.0,55.0,Kowno 25.3,54.7,Wilna 26.4,54.4,Smorgoni 26.8,54.3,Moiodexno 27.7,55.2,Gloubokoe 27.6,53.9,Minsk 28.5,54.3,Studienska 28.7,55.5,Polotzk 29.2,54.4,Bobr 30.2,55.3,Witebsk 30.4,54.5,Orscha 30.4,53.9,Mohilow 32.0,54.8,Smolensk 33.2,54.9,Dorogobouge 34.3,55.2,Wixma 34.4,55.5,Chjat 36.0,55.5,Mojaisk 37.6,55.8,Moscou 36.6,55.3,Tarantino 36.5,55.0,Malo-Jarosewii""" c={} for line in cities.split('\n'): x,y,name=line.split(',') c[name]=(float(x),float(y)) g=[] for data in [data1,data2,data3]: G=nx.Graph() i=0 G.pos={} # location G.pop={} # size last=None for line in data.split('\n'): x,y,p,r,n=line.split(',') G.pos[i]=(float(x),float(y)) G.pop[i]=int(p) if last is None: last=i else: G.add_edge(i,last,{r:int(n)}) last=i i=i+1 g.append(G) return g,c if __name__ == "__main__": (g,city)=minard_graph() try: import matplotlib.pyplot as plt 
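        # Plotting step, described for clarity: each of the three army groups in g is
        # drawn in its own colour with node sizes scaled down from the recorded troop
        # counts, small black markers are overlaid on every node, and each city name
        # from the city dictionary is written next to its map coordinates.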
plt.figure(1,figsize=(11,5)) plt.clf() colors=['b','g','r'] for G in g: c=colors.pop(0) node_size=[int(G.pop[n]/300.0) for n in G] nx.draw_networkx_edges(G,G.pos,edge_color=c,width=4,alpha=0.5) nx.draw_networkx_nodes(G,G.pos,node_size=node_size,node_color=c,alpha=0.5) nx.draw_networkx_nodes(G,G.pos,node_size=5,node_color='k') for c in city: x,y=city[c] plt.text(x,y+0.1,c) plt.savefig("napoleon_russian_campaign.png") except ImportError: pass networkx-1.8.1/examples/graph/football.py0000664000175000017500000000252612177456333020375 0ustar aricaric00000000000000#!/usr/bin/env python """ Load football network in GML format and compute some network statistcs. Shows how to download GML graph in a zipped file, unpack it, and load into a NetworkX graph. Requires Internet connection to download the URL http://www-personal.umich.edu/~mejn/netdata/football.zip """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2007 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. try: import pyparsing except ImportError as e: raise ImportError(str(e)+". Check http://pyparsing.wikispaces.com/") from networkx import * url="http://www-personal.umich.edu/~mejn/netdata/football.zip" try: # Python 3.x import urllib.request as urllib except ImportError: # Python 2.x import urllib import io import zipfile sock = urllib.urlopen(url) # open URL s=io.BytesIO(sock.read()) # read into BytesIO "file" sock.close() zf = zipfile.ZipFile(s) # zipfile object txt=zf.read('football.txt').decode() # read info file gml=zf.read('football.gml').decode() # read gml data # throw away bogus first line with # from mejn files gml=gml.split('\n')[1:] G=parse_gml(gml) # parse gml data print(txt) # print degree for each team - number of games for n,d in G.degree_iter(): print('%s %d' % (n, d)) networkx-1.8.1/examples/graph/words_dat.txt.gz0000664000175000017500000010163712177456333021372 0ustar aricaric00000000000000‹I®Fm½IëÀÒ¦·¯_!«nI¾Ÿ8вWvÛîM£aÀnÀ;ƒ%Q8]¥býzÇóF²ÎmÀgȉT2™ÌŒŒ9ö»ÿûYO»{ÝT»w?Þ¦ÿ÷VÎÿ˜¿çµå8ïúûî¿V³]{ý?»²»Ñ~«§y¬?—¹ºí>ö»w=?wó³ÚMe[íšúZuSµ+§ßßýÃnúßþå7Kw«Fýb®ÆvâTþóýo»ÿRM“]ûÏUWe³û?—Oëo÷_¼Oëæ9ÏÃÿôïÿþ~¿ÿñè–ôãã߯ý°6Õ}þ÷F¿ýÇsn»óï{YÞõóïÛð¨ÿk.»»½îî?åðü_Ë©úŸÕÞñOúgUuÖÉõYvñÜïʯ¾¾Ù ¬»kßÝ—©î;sžaOûÖº(g»Ü4Õu¶ë¼÷ÿÁþMSû›ÚÛnüÿÃî?ü§ÿ¸‹.—äïµÿÖÕ_Õ8Õój}ý/;ë¡üäN{ÿ½{4õÄó¿ªÿ±©f›Kÿ„ÿÝË·åj/cO­ª†¡µ½Ž}†ÝÐTttë59þ¢»Ú¾{§—|—ëÿ`ý‡ÿô¬®¯ii™>›{È´ËÎÙù˜Åy|ɳKþ?Êr|>ÆÇ„ò}þÐÐGû£–éy ›«ýÑn:òËÏjÝ'ÇH×?û™²•ª²%ín¤ó¤¾Ÿý¸§§èb•úVí£Ó1?&Öuªß4ÕM©ÝEÇý­:dÇ”".÷㬟ÏT–yÅé%?FEd,.…%ù±H­Ç8£•»¾ª}Û¥ø’ÙÿüÛ“Î)W 7ØWÚùk,Sµ¿#ëïtŒâc´½Òbƒ·0lj??–®Ó´gd©Ær}V·}|¶qzeâ½"Š}Ï{_ë›Ý~9žÕXw½e/ÞõÚV“ÍçµS¥³_ZÖèæ‘Îì§©}(4Õ·}ÄØT³­¹/xýcñÕ Æl ¹#µéNS½rÁ+^m#ïí{5Ý|Ì>Ê[ù°Ï`¥”‡yoSÇ_½Øíf½_Î6ö?¶ß0‡ÇÜÞ£ÐÅÑVŠýá§·¦²¹UÖÓñ¼[]-‡ˆJD¥Ÿö¡½©?¹¹-;eUÇ ¼µõ¼·O“œö cæÖ[¿¹©ÿ¬öéI-½=ƒÏdo‘/º3¡y´;¨©Üíí „¡.ý$³wÈŽInKN3yû±ÉU©zÔö)ŒuÅ»ßëoûÀ6Z{ྲྀž#M÷½·õ…×»óÄóÑÛ—æ@“m5ûÒ‘ýôÄ4Eñéhkò|JÙÙ¦ád ÿQÖ}…KlM6–'?ÚÒ€}ì¥6¡½jûi}GÌLNe;CÚOÖž«Ïòs«:P™êìšÖûŽç¶õ£ßk³´5;ÑÒþ~ÉŸ|ôŸë–y=D¾˜Ú¶·­ÔöÚá'[SùÅÎû6W'Û+¯¶_Ø,íÀ ÓŒ¯Ð!tKÕ´â£ÃlŸÚvaѓۉÈVÖ¯=Íp@vqg€çÜ£ak^4çq«ÉæÅ“Úxìë$,#CyXÆ‚!6¸#ˆîmYžbÀWªé‰Î´N3O°ýOÚ–;©3Ììà#îjÃG€»×“©ì^ΆPßu†rWW}ÿBꮳ…ÌyáXL×´ŽôåuçÒXÅz¼ávŠœÏ|ª®7\ŒûgëYð®›ëë^ËJÌÉ<­4ÕÍ>ñ}e4\¥´Ïj¿Ê«Ak=-¦fð#8lÚ´ì3LûPꇧð©òiç_d¨ËŽÔ>u¨OÖ†®2°6Ø“,åËÆ¶ßÀ2Pµ6{kΙœ)¿Ø¦J©Ø•±ïö† ÙÒTùé7L¬¨a¶ŸÜhÍÿs)+¥¼úh(X8ñÆ«Á]Ëì¨ÝkŒ•=Ë ˆ$iönö¡cÕ•v–ØS3MÅh‡ŽCêH•ùà…žÀ…Í~)QÝ=øÏìûï9Ë à”™¹ÐÝc·AwNŽ“A1Pïâflm¬ùI‡!ÂpúÒ0¾dNoýžÏêìäÌ5å¤Ô&…l4v 1§ÝÀgt ° ü"b 6£¤ö~±ÃàÑÉh@棜ÊÎ>ÜtíYéÓ³ú½kw,Μ]@?›³IXržngl|º÷Åz1˜—z›²álTç‚‘åº<*}ûA`åÁtñ’­`º˜ÊZéº>©n'…¥œÄìbªföNlnh%¯·õ`„bÏçŸëÆ–ãÜØ"Äc6òjŸ“M8°€O Úì¸pàc]½is38'µq¦ŽnØv\ …<è¬d‹Å~»Þ$G/?çrƒxÇËÛ¬9|ËÈZ^ Q#Gî­Ìº^Æúj)BAW_†~Ø“„ßs(}Ù&?<öË^ …·W6 ÕL„1ÕÂi!;í8²ïX 
žÿÆVž<½6D@% ·Ë…6D{ñ‡<ØDÍbF¥`¨Ì×c9ŸdÀž¸áø$v“/ÅZÐb~gßL¸* ʃSÝ„!6¢¡d²ŸR‚Ø|)W´l§âŒtÕô¯C ÜNÇS=^ƒòÉÃhs^³H]ýìttÌü—‹Ë!&¬Ð"Ã)V‡Dy·é«Lnn¤,Çd/Ra"ð´¿ë\[vr—¨þH9×Ðú÷õí÷-ÓN á[*ææ'\ŠØeK°®;¿%U•ÓOy"JmÿÕ?n_)’­FA:)¦@»åxr᣼¤Â匞éUa8S@¿ŒÇÓ«¾I~cY;“Ëâ-´Áh h_JAÀSõ®PÀ¹x")ãâ¿h5C/üÎðIÇ”õ¥ßuòËJƒ{®Ñšd¾^ÀÍȃjÆ oM¿âÈ w§Ìðki^:V_ ´.Läy\Üæè¥ 6S# IR¹ ï¢{Å%Ê6 ÖÆ!·)áÜìéÞWåí “äräPAt_ÉxÊ¥ep³ŠÓ3„¹?*ÂÍ$=ºà¼ôè·Ì[¬©Ñy_"1:/´pÊI³.rÙÍÔpë9†ðº–¥xžÔ`S¤¹£|™…g7èÒ/¢¿ŠýÊ|¦<ïγѩ)œñ~g,ÎY¯Î|tMA0£BDð™*+®S8á™ÏÃ.é{6µdžÐqŽEà"OðСz{®ÉjE<¥.k(¼eörª8Syˆ¼e%!uM ؈¤ó‡”“w üÆÒg¤¨ËöA˜ âGÇ[lïÉÕÏì",’x“ñO„ÕTÊ&iŒˆâ᨞Ås›¤˜Ju;Q-X‹ð zïQÎ}^<ä½å#ž#ɧ_‡ÚSN¢rò Ž 2÷cô1µ¶pQ`šxƒ-zCH /ÄHˆE–s&¹õ0I’¤óm\Oa›ÖØGlA÷|¦[P®D^?îŸ4Vã¼ÛŠ{‡®@¦˜%Þ4¾öðÇц¿O¦ÊSA™ELµœYi íg©m­$Í “ç®Dº²:³” ]àB/ Þ(UR0'¹…öX4TîËìáòúœrÝ(YøÔØyKÊãT ÿJ.É-‚âf¸Ó0›88åÖ^±–»Ò—ÒF"OGDö8ÈõÓ*ȹ¹h†1*“ÓÆûê“+Wd5é‹»Ci5ôýMéKé°³“Ú˜WáŠ%ÞTB?†GÅéßׂ¾% ª+üڰúåÓõ ¬d Ì ÷Shäkõ2ŸÈ) né©bà‚IÒ‘…eÜÃúäéÇ>xïŽ + ßýµ©,½•8f™ú{É›Ó)ÑðHÁp¥è¢v_{`Y‚>jäF¾8T¤ê#hÜÆ(s—ªV'b‹¡v@ä¯}ØMOx r)Cqã¤ô°$‹³ünÄ©|Ff°ìŠí Kñ£T9É/cŽ£Ô"ø å÷/ õ]£—ñ`†÷=Ú1Ê*ˆ-¾ÁÐH娽|‡„¨>÷òn)ý¡ÈUJiŸ s>¹æ©¸{ŽÉò¬?]¬Q‚”þ$ìÉ!Tá´‰ô!F_cñcÌoVõ ¨ò„¯“{ò0à’—nÛ**)D°UõÈ\ýýÊìaU*ñØ@‘Dƒ °™Ô%¹Î< ÿζ}wî³`ÂTLé ŽEØ~ÓuÑGùo‹¥#>¬~c€ê¼ÙI‰7‰5 Ï¡œÚ}XFBÞ?”Áþ„-tMá[h(Q·ìçÇR`‘°Å¶?Atc$°„ùÐ^?©±¤j¨ÕuÓxóçölhŸT'“ <9yW·LºO4ÍEØbíb$;4/ó Eõ©ƒÊù€Ž×"7òäÊØî5®÷_ëôiPlˆMX–¨aRêº}áéøG-T>6'@E´¹~š5Üï¤/9EU\õå„:zðÛðdÔÚ6¾ÁPw¥RØ òNLEÙüÀDœ3þZTÆ3Vž]s6u(nVxê¼ixíg§>¾fUÃÁs-¾F.Ð$Ž’“TAöƒEÁ‰0¶ÈÂÇãHËÐhCL!±u‘ú…þ~_±}ʬB‡¡`®ÅÐCþ£g.b`hwfU,ÎÝÝmË(Î'п8=l½ÛX®„§rD5ÈWu¦o¬ûߣËjóN?îÙ\ãB\I+ÈÕºeR°œèkþ¾K»’bnºXÆîðwU3<üÝvuþ=؇e¶oûÏ¥üäFõsï’X5EÚ*o]¢X<¿íÝÙ4ƒÚŽÈ,Ö¡{¾í“ÓÆY†ësÖ¾‡àIÕþp]<̆c—ƒÅŽÃÍ ±wz.ëàIåŽGËþ)S}Ô¢öÚHÝý©GðH×Ãé4«¼ø"aΠprB8[ ¤©©såÔ&é²<#aÏ 7lø MeÕÀõ׿žœÚÀá2ÏNHžÜ1,m/'5.âËFbÞe’@ª œ‘ _¨HZˆ'6KƒÖ26˜ÐÀœ_ŸäÒ?8 Lu}Òm_Õ>xq3b†˜ò$*Ý×iÆ2¨p•É"Uý²T„é&êW‘¸£'‘QÖÒ*¨8ðMwxí Ù™‰ý?#¹ÈCMho.R¬ê–‰ÉFûÏ}Y)”λ0óBñµD‚È"à¨ûµ‹z\³’¿*ϸ´ø#y–µ–¶T: žáZf) Å2ÈÕ¨ð‘Y%̆(3!g¹Ü‡9„³ži6+ÜüËëà1 ßZT¡¼ÆE!LÓEÛà›ã°?>Ê,* ¸¢]xíÏ´±Š­Aö œn“0QŠÓKç*˜DP=ùË-ƒR¼Ü’ÄñÚY,Î" vJÞÑÛ?·äƒQ.£ÁA¢Ø¸Q_^Àw×$vzd+Q–3lÍM$<õ€ç!qZež>ØØìSDBqš–9X„o uàî ­ çDўݒ|»œn¢BÙæ€ÈŠÌ–æ Q°dâDXÄî pϮή`Küõ2]ËÆi s»tÎé™aÂnM38ˆaE~?¯ ¨‚|@AëÛ¢f§;šúcZ (¹7IæÖžãÞηõÚŠãD*ìÜ2]~à ›ˆ@©+Ûl/ÚHú8GìæÁcâ$ýy1í2~ÐÜKh?A©­¿ˆaêàyQˆ©i1J_¾'¬ÔJcd‘®ZdÌÎóØ[`EÒ©|D+ÔõÜiãMœˆSÑ…´ìöë4hZ\3Û‹w½±BÇ ß愛܇³h™žµØ‡‹->ëëÍùK™ø¶Ó‘½ý.ÛZÏ›Ó,O]ê Ýë°•¦ÍCŸªÎ§y—„TMc¥Ô (µlþ$VŸå¼ÀJv0¥äFWÅÛ“fØ_ó±é :î–ð`¶ÙEM³ëF)ÀÛÛȞˡQDA-;öõ– ‹ã_‘Åxƒ—§hÒ²B±@iÊ " Ñ7B)"kEjñ0±Ö*vÖ»–LÞïk§ e>šþ×Çy¦úcçÌX‹¯€7ï ÎDêðâ]O²ä÷ Ótà×_¼Xç/¾ðÍÍv'þÝ·ý% ñ1p¿…•ŠÚÆJ,VK‚lE:^ÞlY0­´p©› bmˆ…jy‡.oø V™H{‚uM+¬ôÀè6dîsu{•YŠSQ&â4¶ã¾2lCåg.÷}ÐþÕÍýh¸«åËd鵚Ië9¨Y͛˛ ʨ}YÞÙžñàbiÕbó•è¥ÜyßÒžŠA)Ÿðð—<-qfcFËz(É\ûͯÏ N&›¨Å  eaךkÐèš]Ã*=¹6,uÜJHîsñÍB“ €g™‚È®ŸÁŸnëfYµaš³BWEÊ¥§Ê [‰gë.¼K’®Ò^›e¼Á'ÌoçÞ?fÌ?öîèh–S÷ƒw ýÎñojęјä“z¼m“<†(J”DóDÎ#ë'¥³*>P8 n(1i£G*Vè ç.‰ô˜m<ß(1øãÙÕw°…U³jÆzö%3v"–êH ªÆ,2ƒŸ<öÝ­ò ™ú£¾]ð"sÈ$´ŒeŒ'Eëö‘/ïZòDÿî¥ÝÇ,³E¸H’=i=¥´¿&R|„ÎR¹At;²D „•ëü¦’¼&¡s‰YžbgHÏ•,õypu}ú!Bi%]ZKÑ÷Lu]HnÀ†gÜ“@u;mKuóh¡‹Djñ8%p€Øÿ`Êß2,ºe=|LÝ_˜K¤K( Ù:ªÚ¡ci‡¢v4†˜ªäˆˆ3¸±»ºŸÊYª3(×:3P*4ü¤SÔUòîpV.Ò C÷Yº5ŠA¹Â ^oŠç$ÈÅÝÀW­¾ß\ÇF5ä Ni^ÇkE ¬V¹R—$û"û®?d)GŽhýfñKGDZ£qvà“"ÑotÖ¢yºtI2……ô. šÎ)"] ŒSñ‘Bɧº¬ãåYs–2+Xƒ$™cÏâÒ%L<¦ØRÓa K‚ q/[óS1xrÛGE†lÈ–zNö rý„#ï c'MÑ9ƒt‘ËŒGáðð¬Þ‡°ࢠ‰e"ÇþÓeôµpÆþ-!O*"S>âÙO´ ¹ t_[ò¹<¦CP뚟î)ã´)`eZ‹ õ¹ëµÏ6ײ§Å‡ÝÃÔºËýÑ-¯çú³Fëw–c1WüX©o®Ÿ «bFUB©P0w¬Š6Ÿ£KÔä¨y®Á\GÑý°žÜª^›Y|¦YQU-­I›[µ±eSª:ìôÎò¾8×!ƉÛfùÚf³§ƒÐ~:“l–Rƒ? 
ÝL½2¤Ú,ví²T&pêñžm_¨à: Ç HrgyØÑš–‚ƒÏ½Ír«[ôH©Zn,ãÜx“Ÿ}i I‘Ñ>5;Ô‹½Ö;St®íÌ÷€]sínš(àøö(åWž17®³›‰4bAäúà[ 2J¯5q¤?f&öeoÂ6FšÒœ»ý ‹´‹œÒÚx wZã!’¸ê¿ñ¯k™uÝ£o™}¥’«J¥ ~Ãí ¬Û¹dpG£_SWîŒÄFÞ›Îò> -nÞ:÷òÒ­ŽñÅu¹øí» ~𮋹ï=§[vê–GˆsôËøÜvW[³œ•$gè $:3ÞnBë‹,qºã60–åÏ>¸¬±|9(=Óoœ^žOG·)Ö ­}­ÔÖMÿÏÅcÇÌýX>IÁJŠàa!§¡¾’NµÒž×÷ÛqúA¦Â;û¹Ü‚ F>OòãŒàf³œêzÞÓ,‡ºsïŽÁç^+EîLRBĸÓךá280÷A±ƒS½:¼æóv‡hŸ|¦·Sû<H<-<~j¬úèa°c?¹§´ìà;‘mèEÂ6Ûý·Íe¾Vë¾"W}üX%^f»%äÊ¢MA’p£4Ú±‰õ‘¸wVh„#àØÑ““Ÿ#¯T¡rpàèRy¤J3vçÒÚÙMTï¤8Œ#Ä“Tô{ü™ÊPŽ Z 'ßëràý¨›vò!DåH\e-®) 4Sh(ºuæ¥ì‰µ|–R„†‚>é‚“_Ò0ƒé›ûvï]üNîýÞ{9‘ÎÓ¦øGÅ æ¥êA ­ÒTÁÍüŒqÚÁýy{žX;té|4ypIâOêÀp yÄ9И‹|ŸºT#£ÚT\ õæeüÄ-ï O)—ĺ#S¥;Q\´ŽÑº³ÍcÆa›%E+~;ß›ó¢p!òÑ̓ç¹ÞÚÞ=bà §ºáFœ4 fn–íúkn5©  Ì3€Ë:}#cÆ·©}ñ³V¢ÜËÅ9—rOñÌðÆ[±«~£ËS{)Õízýœ¹/Lµq %¤±;0ù©µèx×ß ÊãJšjÇVÜëXŠóÄ äwŽÔÀÁÊîáâF\gÿ¦“kÆ×ªëH<ÚùÇêác‘?òäcy•!xÖÒ\Å€¥‹¥éÊQéd©ÁÙ½L‡ ³®¥Â¿¬ÀËëòe©µ;c ëu‘WqX-NìK𬠦ùª¶Ï]¯áË=v¸#FWaOÖ”RøS“‘²¤µ¥“IY&xzFʽÍBÝÛÏcͰÑ›þ’çQpë(½yËmÐÊ8£ø¥á`Eð¬6•> “( xË H§ìP œ $šûÑùGVÄ1˜6õW=—ÁñŤ•Ó“›¹ÓÖ*e*VFñåuâ‹çWV­‹ðUã¥ÍòŸÊW-?•6{Ùü€=|õ·Wy/é/ù·p°î´ÄW/Ͷ¸€™Ž¦!úbžŒÀ×@_7åÁsôt¿ßÎ]„Y¿;iœ}‰šÍO!HæE £3ŠBM«î‘/¶= ²–>ß•k½8Aõ%ª4ÂÛYÛ[ý·ù_8ÁØÙ–æË€°àö»”Jí[¢,É»¼¹%sOe<(C¹Öòš¥ð– Ãü-g…+רŒQ÷EP0{@X´ðuÎ@Öf/"ö, ¼ˆwùì ma(á]}*XAàj[ ÌËi°Ü-ý~?AÜãÕ[ñ¸Ž^|¹“ߣwýr]±Š£ÒÉ5½G7ÊĈ¤ô Â3^âÁ#Þ fÀÙÊÑèæK­ó+ÍEüVpÂÇ}—]¿Õ_9ò[Á…·ö®Üåâv¾@8÷ÖÙÍñÎR%‘KAÖÀâíIrËT÷îiÁ‰9>…ÄÏg¿Ý¥äº?|ÞqÐ4#ü’d]åU ø­PÁo÷«–m£µM¥¾Ò$§#8e™¸B§Ð±ilŸ«’Xá yïRë~*¾‹SEÌàzÅýãÙò/19¥¹*æLâC­«D5a©À£[¨pËwøe0ª§¡ š}o Ø2åú¥•~úþÈæ¦t"í˜ ÇΛGMö6M«EUHZ­_rÎ){ ›ú oO–²Ìdƒ.×NÛ½eþ–:B9±aÍ(ú™Ê·¬Ð3Xr% ÒÒV¾‘ž‘ÁÜC?DÓûÁ¹¥h½¿+-º¸ë`*²E<Ú«-jm£È@¦žQðuöŒpç"Œ]×§gøÅ¯ ÕqÚQë°&ò #˜CtÏ·¬°£°¯Œ¢ã³ë@ël|,ëÊÆû>ƒåé’¥ö]”Î<8ÿÑÖ 7TêÉ×YVø=·¦¹ÁP†¼U:¸ÈšqT‚«·„Jb¢ ŒžSÐ9iÅL6Ö¯ãd/léªJ—用}‰ ¤ÄßM>„`|ŸÓ;Švn›ee½VÀРLþÖ!­]•:ɸ#HN}ê‚ )õËçÌ[vÃÁ?¦îÔgMdjì¯uÿ¨…þ”Ò9*—d¯½k‡Q–Ö·î–MYI‡¤[rŽÜž–«þÊ??|é' {–BÔ6WÉ %–Lˆ¿½L&»Þpï@ý)íÁL€K¯(º±7šˆÆ†á¢$Ÿ=.ã#¥l·" úxKªc™âS¥Ç_­o¹US€n¹âãp9{{ÿÖ9*ù=üB#ñÓË Á z^ïÚ¶CðªeóXß+ÛyusíIob8½‘ç(Ýüš¤ÔÜùQFJo‰mÂ+ú26¬¬RSåO5ð–ç´$ÄæŒ ¯ƒ‹šL.‘|!ÕŠˆóçV%î+ž‰x ‹6OØrBë+ =´BúVàƒó%D­ŽÜg¢vÜ]½ý@ª¬N²¼%õ‰cWH 6*•ìIj±V‚2Æ[¡–c·/‰5Ù†’S@`)¯{½”‡b'€fj›Ÿj›H’?Ð;šö~žÈÙbÄ·ù©º·ä2z;‚[jĶL bÎÜè@0Ñþ¹É×Þ„Û#è”R¯±ôQ„•®¤ N‚³Ìy}±«B¿%8橽ÈçÀN¥ ò$oeªB0‘­Ê ’&ä?ÚÕý¨HÚ±tŸ!t,‡ö†rÑ®ëòÆW¼B׊›™»Â4®£a–p¹Ë­ˆ¯œRŽh'ôs`L´¹bIÔ¢à%”P<Œ0M¥x ÙèE*Ù&t!Æ[ῘœÀ—‰–Ðᢊpà³FE°¥ŒÂ h2œêصô‰>–½qñ/«Öý¡åâ* šð{œ†ÈT^@‚åÕÇ{”y#ƒ™[dz ›ýìðølëiDO< ʃV³Ó꜉¹ì](`ù4{z«ÀãàÐ5Zµ”FðÓ\¼å35"€.<› =q,ܪ'Š3]ŸÂ %·ç&ðÇ`¼x’ŽxÁ(»Å³ˆÃò˜Ç¹ÿt~goü”¹Ç"ˆQ,“Â%DÝ{ásK«…ï`˜Pÿm „&øø^›ª= ÙZËëstÊÙj·-uà¡×_óäKIJµ ¿ÌÝ[ÑŠ·©ï†3v%“»Ø¡¸ú¥=‘ÕPªnãO{Õú¾YzÉ;ØohÒE¨LÆ |W—Z«ëõi©»»³rGPÜîb­ ‘¶tü$ù¹ ¬;;+•CG·j±ß×b×®€<Û“kÿùio®øêêË%ÖÕW¥Ü«°d§¢J*¨ioÜ%qð#—òj)Qz×^jŽx4)œ ”\±'\%‹D<„̈K“‹áœ_•²˜Æ©쉫ïF£\á¯Ëõê!2(\b]^Œ@žò,“ ÁrväÎÎ,5âðãÇù;?•Ä?Á+ï­šÉ14™Ú‹ÿˆ1¢ÌžðS‰ÿ©Á*-CGóÇÃöqG-ãÄŸ/‹?=¡j$gº~\þ© WmÎãý8þQ\Ž\>’Ê1·Úa½K?ö»ÿ½»íúûînGòîßUÿq+çûøÿ ‚a(ƒnetworkx-1.8.1/examples/graph/unix_email.mbox0000664000175000017500000000325512177456333021242 0ustar aricaric00000000000000From alice@edu Thu Jun 16 16:12:12 2005 From: Alice Subject: NetworkX Date: Thu, 16 Jun 2005 16:12:13 -0700 To: Bob Status: RO Content-Length: 86 Lines: 5 Bob, check out the new networkx release - you and Carol might really like it. Alice From bob@gov Thu Jun 16 18:13:12 2005 Return-Path: Subject: Re: NetworkX From: Bob To: Alice Content-Type: text/plain Date: Thu, 16 Jun 2005 18:13:12 -0700 Status: RO Content-Length: 26 Lines: 4 Thanks for the tip. Bob From ted@com Thu Jul 28 09:53:31 2005 Return-Path: Subject: Graph package in Python? From: Ted To: Bob Content-Type: text/plain Date: Thu, 28 Jul 2005 09:47:03 -0700 Status: RO Content-Length: 90 Lines: 3 Hey Ted - I'm looking for a Python package for graphs and networks. Do you know of any? 
From bob@gov Thu Jul 28 09:59:31 2005 Return-Path: Subject: Re: Graph package in Python? From: Bob To: Ted Content-Type: text/plain Date: Thu, 28 Jul 2005 09:59:03 -0700 Status: RO Content-Length: 180 Lines: 9 Check out the NetworkX package - Alice sent me the tip! Bob >> bob@gov scrawled: >> Hey Ted - I'm looking for a Python package for >> graphs and networks. Do you know of any? From ted@com Thu Jul 28 15:53:31 2005 Return-Path: Subject: get together for lunch to discuss Networks? From: Ted To: Bob , Carol , Alice Content-Type: text/plain Date: Thu, 28 Jul 2005 15:47:03 -0700 Status: RO Content-Length: 139 Lines: 5 Hey everyrone! Want to meet at that restaurant on the island in Konigsburg tonight? Bring your laptops and we can install NetworkX. Ted networkx-1.8.1/examples/graph/degree_sequence.py0000664000175000017500000000157412177456333021720 0ustar aricaric00000000000000#!/usr/bin/env python """ Random graph from given degree sequence. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" __date__ = "$Date: 2004-11-03 08:11:09 -0700 (Wed, 03 Nov 2004) $" __credits__ = """""" __revision__ = "$Revision: 503 $" # Copyright (C) 2004 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. from networkx import * z=[5,3,3,3,3,2,2,2,1,1,1] print is_valid_degree_sequence(z) print("Configuration model") G=configuration_model(z) # configuration model degree_sequence=list(degree(G).values()) # degree sequence print("Degree sequence %s" % degree_sequence) print("Degree histogram") hist={} for d in degree_sequence: if d in hist: hist[d]+=1 else: hist[d]=1 print("degree #nodes") for d in hist: print('%d %d' % (d,hist[d])) networkx-1.8.1/examples/graph/knuth_miles.py0000664000175000017500000000566212177456333021121 0ustar aricaric00000000000000#!/usr/bin/env python """ An example using networkx.Graph(). miles_graph() returns an undirected graph over the 128 US cities from the datafile miles_dat.txt. The cities each have location and population data. The edges are labeled with the distance betwen the two cities. This example is described in Section 1.1 in Knuth's book [1,2]. References. ----------- [1] Donald E. Knuth, "The Stanford GraphBase: A Platform for Combinatorial Computing", ACM Press, New York, 1993. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html """ __author__ = """Aric Hagberg (hagberg@lanl.gov)""" # Copyright (C) 2004-2006 by # Aric Hagberg # Dan Schult # Pieter Swart # All rights reserved. # BSD license. import networkx as nx def miles_graph(): """ Return the cites example graph in miles_dat.txt from the Stanford GraphBase. 
""" # open file miles_dat.txt.gz (or miles_dat.txt) import gzip fh = gzip.open('knuth_miles.txt.gz','r') G=nx.Graph() G.position={} G.population={} cities=[] for line in fh.readlines(): line = line.decode() if line.startswith("*"): # skip comments continue numfind=re.compile("^\d+") if numfind.match(line): # this line is distances dist=line.split() for d in dist: G.add_edge(city,cities[i],weight=int(d)) i=i+1 else: # this line is a city, position, population i=1 (city,coordpop)=line.split("[") cities.insert(0,city) (coord,pop)=coordpop.split("]") (y,x)=coord.split(",") G.add_node(city) # assign position - flip x axis for matplotlib, shift origin G.position[city]=(-int(x)+7500,int(y)-3000) G.population[city]=float(pop)/1000.0 return G if __name__ == '__main__': import networkx as nx import re import sys G=miles_graph() print("Loaded miles_dat.txt containing 128 cities.") print("digraph has %d nodes with %d edges"\ %(nx.number_of_nodes(G),nx.number_of_edges(G))) # make new graph of cites, edge if less then 300 miles between them H=nx.Graph() for v in G: H.add_node(v) for (u,v,d) in G.edges(data=True): if d['weight'] < 300: H.add_edge(u,v) # draw with matplotlib/pylab try: import matplotlib.pyplot as plt plt.figure(figsize=(8,8)) # with nodes colored by degree sized by population node_color=[float(H.degree(v)) for v in H] nx.draw(H,G.position, node_size=[G.population[v] for v in H], node_color=node_color, with_labels=False) # scale the axes equally plt.xlim(-5000,500) plt.ylim(-2000,3500) plt.savefig("knuth_miles.png") except: pass networkx-1.8.1/setup.py0000664000175000017500000001104412177456333015007 0ustar aricaric00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- """ Setup script for networkx You can install networkx with python setup_egg.py install """ from glob import glob import os import sys if os.path.exists('MANIFEST'): os.remove('MANIFEST') from distutils.core import setup if sys.argv[-1] == 'setup.py': print("To install, run 'python setup.py install'") print() if sys.version_info[:2] < (2, 6): print("NetworkX requires Python version 2.6 or later (%d.%d detected)." % sys.version_info[:2]) sys.exit(-1) # Write the version information. 
sys.path.insert(0, 'networkx') import release version = release.write_versionfile() sys.path.pop(0) packages=["networkx", "networkx.algorithms", "networkx.algorithms.assortativity", "networkx.algorithms.bipartite", "networkx.algorithms.centrality", "networkx.algorithms.chordal", "networkx.algorithms.community", "networkx.algorithms.components", "networkx.algorithms.connectivity", "networkx.algorithms.flow", "networkx.algorithms.traversal", "networkx.algorithms.isomorphism", "networkx.algorithms.shortest_paths", "networkx.algorithms.link_analysis", "networkx.algorithms.operators", "networkx.algorithms.approximation", "networkx.classes", "networkx.external", "networkx.external.decorator", "networkx.generators", "networkx.drawing", "networkx.linalg", "networkx.readwrite", "networkx.readwrite.json_graph", "networkx.tests", "networkx.testing", "networkx.utils"] if sys.version >= '3': packages.append('networkx.external.decorator.decorator3') else: packages.append('networkx.external.decorator.decorator2') docdirbase = 'share/doc/networkx-%s' % version # add basic documentation data = [(docdirbase, glob("*.txt"))] # add examples for d in ['advanced', 'algorithms', 'basic', '3d_drawing', 'drawing', 'graph', 'multigraph', 'pygraphviz', 'readwrite']: dd=os.path.join(docdirbase,'examples',d) pp=os.path.join('examples',d) data.append((dd,glob(os.path.join(pp,"*.py")))) data.append((dd,glob(os.path.join(pp,"*.bz2")))) data.append((dd,glob(os.path.join(pp,"*.gz")))) data.append((dd,glob(os.path.join(pp,"*.mbox")))) data.append((dd,glob(os.path.join(pp,"*.edgelist")))) # add the tests package_data = { 'networkx': ['tests/*.py'], 'networkx.algorithms': ['tests/*.py'], 'networkx.algorithms.assortativity': ['tests/*.py'], 'networkx.algorithms.bipartite': ['tests/*.py'], 'networkx.algorithms.centrality': ['tests/*.py'], 'networkx.algorithms.chordal': ['tests/*.py'], 'networkx.algorithms.community': ['tests/*.py'], 'networkx.algorithms.components': ['tests/*.py'], 'networkx.algorithms.connectivity': ['tests/*.py'], 'networkx.algorithms.flow': ['tests/*.py'], 'networkx.algorithms.traversal': ['tests/*.py'], 'networkx.algorithms.isomorphism': ['tests/*.py','tests/*.*99'], 'networkx.algorithms.link_analysis': ['tests/*.py'], 'networkx.algorithms.approximation': ['tests/*.py'], 'networkx.algorithms.operators': ['tests/*.py'], 'networkx.algorithms.shortest_paths': ['tests/*.py'], 'networkx.algorithms.traversal': ['tests/*.py'], 'networkx.classes': ['tests/*.py'], 'networkx.generators': ['tests/*.py'], 'networkx.drawing': ['tests/*.py'], 'networkx.linalg': ['tests/*.py'], 'networkx.readwrite': ['tests/*.py'], 'networkx.readwrite.json_graph': ['tests/*.py'], 'networkx.testing': ['tests/*.py'], 'networkx.utils': ['tests/*.py'] } if __name__ == "__main__": setup( name = release.name.lower(), version = version, maintainer = release.maintainer, maintainer_email = release.maintainer_email, author = release.authors['Hagberg'][0], author_email = release.authors['Hagberg'][1], description = release.description, keywords = release.keywords, long_description = release.long_description, license = release.license, platforms = release.platforms, url = release.url, download_url = release.download_url, classifiers = release.classifiers, packages = packages, data_files = data, package_data = package_data ) networkx-1.8.1/doc/0000775000175000017500000000000012177457361014044 5ustar aricaric00000000000000networkx-1.8.1/doc/make_examples_rst.py0000775000175000017500000001250212177456333020122 0ustar aricaric00000000000000#!/usr/bin/env 
python """ generate the rst files for the examples by iterating over the networkx examples """ # This code was developed from the Matplotlib gen_rst.py module # and is distributed with the same license as Matplotlib import os, glob import os import re import sys #fileList = [] #rootdir = '../../examples' def out_of_date(original, derived): """ Returns True if derivative is out-of-date wrt original, both of which are full file paths. TODO: this check isn't adequate in some cases. Eg, if we discover a bug when building the examples, the original and derived will be unchanged but we still want to fource a rebuild. We can manually remove from _static, but we may need another solution """ return (not os.path.exists(derived) or os.stat(derived).st_mtime < os.stat(original).st_mtime) def main(exampledir,sourcedir): noplot_regex = re.compile(r"#\s*-\*-\s*noplot\s*-\*-") datad = {} for root, subFolders, files in os.walk(exampledir): for fname in files: if ( fname.startswith('.') or fname.startswith('#') or fname.startswith('_') or fname.find('.svn')>=0 or not fname.endswith('.py') ): continue fullpath = os.path.join(root,fname) contents = file(fullpath).read() # indent relpath = os.path.split(root)[-1] datad.setdefault(relpath, []).append((fullpath, fname, contents)) subdirs = datad.keys() subdirs.sort() output_dir=os.path.join(sourcedir,'examples') if not os.path.exists(output_dir): os.makedirs(output_dir) fhindex = file(os.path.join(sourcedir,'examples','index.rst'), 'w') fhindex.write("""\ .. _examples-index: ***************** NetworkX Examples ***************** .. only:: html :Release: |version| :Date: |today| .. toctree:: :maxdepth: 2 """) for subdir in subdirs: output_dir= os.path.join(sourcedir,'examples',subdir) if not os.path.exists(output_dir): os.makedirs(output_dir) static_dir = os.path.join(sourcedir, 'static', 'examples') if not os.path.exists(static_dir): os.makedirs(static_dir) subdirIndexFile = os.path.join(subdir, 'index.rst') fhsubdirIndex = file(os.path.join(output_dir,'index.rst'), 'w') fhindex.write(' %s\n\n'%subdirIndexFile) #thumbdir = '../_static/plot_directive/mpl_examples/%s/thumbnails/'%subdir #for thumbname in glob.glob(os.path.join(thumbdir,'*.png')): # fhindex.write(' %s\n'%thumbname) fhsubdirIndex.write("""\ .. _%s-examples-index: ############################################## %s ############################################## .. only:: html :Release: |version| :Date: |today| .. toctree:: :maxdepth: 1 """%(subdir, subdir.title())) data = datad[subdir] data.sort() #parts = os.path.split(static_dir) #thumb_dir = ('../'*(len(parts)-1)) + os.path.join(static_dir, 'thumbnails') for fullpath, fname, contents in data: basename, ext = os.path.splitext(fname) static_file = os.path.join(static_dir, fname) #thumbfile = os.path.join(thumb_dir, '%s.png'%basename) #print ' static_dir=%s, basename=%s, fullpath=%s, fname=%s, thumb_dir=%s, thumbfile=%s'%(static_dir, basename, fullpath, fname, thumb_dir, thumbfile) rstfile = '%s.rst'%basename outfile = os.path.join(output_dir, rstfile) fhsubdirIndex.write(' %s\n'%rstfile) if (not out_of_date(fullpath, static_file) and not out_of_date(fullpath, outfile)): continue print '%s/%s'%(subdir,fname) fhstatic = file(static_file, 'w') fhstatic.write(contents) fhstatic.close() fh = file(outfile, 'w') fh.write('.. 
_%s-%s:\n\n'%(subdir, basename)) base=fname.partition('.')[0] title = '%s'%(base.replace('_',' ').title()) #title = ' %s example code: %s'%(thumbfile, subdir, fname) fh.write(title + '\n') fh.write('='*len(title) + '\n\n') pngname=base+".png" png=os.path.join(static_dir,pngname) linkname = os.path.join('..', '..', 'static', 'examples') if os.path.exists(png): fh.write('.. image:: %s \n\n'%os.path.join(linkname,pngname)) linkname = os.path.join('..', '..', '_static', 'examples') fh.write("[`source code <%s>`_]\n\n::\n\n" % os.path.join(linkname,fname)) # indent the contents contents = '\n'.join([' %s'%row.rstrip() for row in contents.split('\n')]) fh.write(contents) # fh.write('\n\nKeywords: python, matplotlib, pylab, example, codex (see :ref:`how-to-search-examples`)') fh.close() fhsubdirIndex.close() fhindex.close() if __name__ == '__main__': import sys try: arg0,arg1,arg2=sys.argv[:3] except: arg0=sys.argv[0] print """ Usage: %s exampledir sourcedir exampledir: a directory containing the python code for the examples. sourcedir: a directory to put the generated documentation source for these examples. """%arg0 else: main(arg1,arg2) networkx-1.8.1/doc/sphinxext/0000775000175000017500000000000012177457361016076 5ustar aricaric00000000000000networkx-1.8.1/doc/sphinxext/docscrape_sphinx.py0000664000175000017500000001711712177456333022011 0ustar aricaric00000000000000import re, inspect, textwrap, pydoc import sphinx from docscrape import NumpyDocString, FunctionDoc, ClassDoc class SphinxDocString(NumpyDocString): def __init__(self, docstring, config={}): self.use_plots = config.get('use_plots', False) NumpyDocString.__init__(self, docstring, config=config) # string conversion routines def _str_header(self, name, symbol='`'): return ['.. rubric:: ' + name, ''] def _str_field_list(self, name): return [':' + name + ':'] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): return [''] if self['Signature']: return ['``%s``' % self['Signature']] + [''] else: return [''] def _str_summary(self): return self['Summary'] + [''] def _str_extended_summary(self): return self['Extended Summary'] + [''] def _str_param_list(self, name): out = [] if self[name]: out += self._str_field_list(name) out += [''] for param,param_type,desc in self[name]: out += self._str_indent(['**%s** : %s' % (param.strip(), param_type)]) out += [''] out += self._str_indent(desc,8) out += [''] return out @property def _obj(self): if hasattr(self, '_cls'): return self._cls elif hasattr(self, '_f'): return self._f return None def _str_member_list(self, name): """ Generate a member listing, autosummary:: table where possible, and a table where not. """ out = [] if self[name]: out += ['.. rubric:: %s' % name, ''] prefix = getattr(self, '_name', '') if prefix: prefix = '~%s.' % prefix autosum = [] others = [] for param, param_type, desc in self[name]: param = param.strip() if not self._obj or hasattr(self._obj, param): autosum += [" %s%s" % (prefix, param)] else: others.append((param, param_type, desc)) if autosum: out += ['.. 
autosummary::', ' :toctree:', ''] out += autosum if others: maxlen_0 = max([len(x[0]) for x in others]) maxlen_1 = max([len(x[1]) for x in others]) hdr = "="*maxlen_0 + " " + "="*maxlen_1 + " " + "="*10 fmt = '%%%ds %%%ds ' % (maxlen_0, maxlen_1) n_indent = maxlen_0 + maxlen_1 + 4 out += [hdr] for param, param_type, desc in others: out += [fmt % (param.strip(), param_type)] out += self._str_indent(desc, n_indent) out += [hdr] out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += [''] content = textwrap.dedent("\n".join(self[name])).split("\n") out += content out += [''] return out def _str_see_also(self, func_role): out = [] if self['See Also']: see_also = super(SphinxDocString, self)._str_see_also(func_role) out = ['.. seealso::', ''] out += self._str_indent(see_also[2:]) return out def _str_warnings(self): out = [] if self['Warnings']: out = ['.. warning::', ''] out += self._str_indent(self['Warnings']) return out def _str_index(self): idx = self['index'] out = [] if len(idx) == 0: return out out += ['.. index:: %s' % idx.get('default','')] for section, references in idx.iteritems(): if section == 'default': continue elif section == 'refguide': out += [' single: %s' % (', '.join(references))] else: out += [' %s: %s' % (section, ','.join(references))] return out def _str_references(self): out = [] if self['References']: out += self._str_header('References') if isinstance(self['References'], str): self['References'] = [self['References']] out.extend(self['References']) out += [''] # Latex collects all references to a separate bibliography, # so we need to insert links to it if sphinx.__version__ >= "0.6": out += ['.. only:: latex',''] else: out += ['.. latexonly::',''] items = [] for line in self['References']: m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I) if m: items.append(m.group(1)) out += [' ' + ", ".join(["[%s]_" % item for item in items]), ''] return out def _str_examples(self): examples_str = "\n".join(self['Examples']) if (self.use_plots and 'import matplotlib' in examples_str and 'plot::' not in examples_str): out = [] out += self._str_header('Examples') out += ['.. 
plot::', ''] out += self._str_indent(self['Examples']) out += [''] return out else: return self._str_section('Examples') def __str__(self, indent=0, func_role="obj"): out = [] out += self._str_signature() out += self._str_index() + [''] out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Returns', 'Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_warnings() out += self._str_see_also(func_role) out += self._str_section('Notes') out += self._str_references() out += self._str_examples() for param_list in ('Attributes', 'Methods'): out += self._str_member_list(param_list) out = self._str_indent(out,indent) return '\n'.join(out) class SphinxFunctionDoc(SphinxDocString, FunctionDoc): def __init__(self, obj, doc=None, config={}): self.use_plots = config.get('use_plots', False) FunctionDoc.__init__(self, obj, doc=doc, config=config) class SphinxClassDoc(SphinxDocString, ClassDoc): def __init__(self, obj, doc=None, func_doc=None, config={}): self.use_plots = config.get('use_plots', False) ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config) class SphinxObjDoc(SphinxDocString): def __init__(self, obj, doc=None, config={}): self._f = obj SphinxDocString.__init__(self, doc, config=config) def get_doc_object(obj, what=None, doc=None, config={}): if what is None: if inspect.isclass(obj): what = 'class' elif inspect.ismodule(obj): what = 'module' elif callable(obj): what = 'function' else: what = 'object' if what == 'class': return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc, config=config) elif what in ('function', 'method'): return SphinxFunctionDoc(obj, doc=doc, config=config) else: if doc is None: doc = pydoc.getdoc(obj) return SphinxObjDoc(obj, doc, config=config) networkx-1.8.1/doc/sphinxext/numpydoc.py0000664000175000017500000001307112177456333020306 0ustar aricaric00000000000000""" ======== numpydoc ======== Sphinx extension that handles docstrings in the Numpy standard format. [1] It will: - Convert Parameters etc. sections to field lists. - Convert See Also section to a See also entry. - Renumber references. - Extract the signature from the docstring, if it can't be determined otherwise. .. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard """ import sphinx if sphinx.__version__ < '1.0.1': raise RuntimeError("Sphinx 1.0.1 or newer is required") import os, re, pydoc from docscrape_sphinx import get_doc_object, SphinxDocString from sphinx.util.compat import Directive import inspect def mangle_docstrings(app, what, name, obj, options, lines, reference_offset=[0]): cfg = dict(use_plots=app.config.numpydoc_use_plots, show_class_members=app.config.numpydoc_show_class_members) if what == 'module': # Strip top title title_re = re.compile(ur'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*', re.I|re.S) lines[:] = title_re.sub(u'', u"\n".join(lines)).split(u"\n") else: doc = get_doc_object(obj, what, u"\n".join(lines), config=cfg) lines[:] = unicode(doc).split(u"\n") if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \ obj.__name__: if hasattr(obj, '__module__'): v = dict(full_name=u"%s.%s" % (obj.__module__, obj.__name__)) else: v = dict(full_name=obj.__name__) lines += [u'', u'.. htmlonly::', ''] lines += [u' %s' % x for x in (app.config.numpydoc_edit_link % v).split("\n")] # replace reference numbers so that there are no duplicates references = [] for line in lines: line = line.strip() m = re.match(ur'^.. 
\[([a-z0-9_.-])\]', line, re.I) if m: references.append(m.group(1)) # start renaming from the longest string, to avoid overwriting parts references.sort(key=lambda x: -len(x)) if references: for i, line in enumerate(lines): for r in references: if re.match(ur'^\d+$', r): new_r = u"R%d" % (reference_offset[0] + int(r)) else: new_r = u"%s%d" % (r, reference_offset[0]) lines[i] = lines[i].replace(u'[%s]_' % r, u'[%s]_' % new_r) lines[i] = lines[i].replace(u'.. [%s]' % r, u'.. [%s]' % new_r) reference_offset[0] += len(references) def mangle_signature(app, what, name, obj, options, sig, retann): # Do not try to inspect classes that don't define `__init__` if (inspect.isclass(obj) and (not hasattr(obj, '__init__') or 'initializes x; see ' in pydoc.getdoc(obj.__init__))): return '', '' if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return if not hasattr(obj, '__doc__'): return doc = SphinxDocString(pydoc.getdoc(obj)) if doc['Signature']: sig = re.sub(u"^[^(]*", u"", doc['Signature']) return sig, u'' def setup(app, get_doc_object_=get_doc_object): global get_doc_object get_doc_object = get_doc_object_ app.connect('autodoc-process-docstring', mangle_docstrings) app.connect('autodoc-process-signature', mangle_signature) app.add_config_value('numpydoc_edit_link', None, False) app.add_config_value('numpydoc_use_plots', None, False) app.add_config_value('numpydoc_show_class_members', True, True) # Extra mangling domains #app.add_domain(NumpyPythonDomain) app.add_domain(NumpyCDomain) #------------------------------------------------------------------------------ # Docstring-mangling domains #------------------------------------------------------------------------------ from docutils.statemachine import ViewList from sphinx.domains.c import CDomain from sphinx.domains.python import PythonDomain class ManglingDomainBase(object): directive_mangling_map = {} def __init__(self, *a, **kw): super(ManglingDomainBase, self).__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): for name, objtype in self.directive_mangling_map.items(): self.directives[name] = wrap_mangling_directive( self.directives[name], objtype) class NumpyPythonDomain(ManglingDomainBase, PythonDomain): name = 'np' directive_mangling_map = { 'function': 'function', 'class': 'class', 'exception': 'class', 'method': 'function', 'classmethod': 'function', 'staticmethod': 'function', 'attribute': 'attribute', } class NumpyCDomain(ManglingDomainBase, CDomain): name = 'np-c' directive_mangling_map = { 'function': 'function', 'member': 'attribute', 'macro': 'function', 'type': 'class', 'var': 'object', } def wrap_mangling_directive(base_directive, objtype): class directive(base_directive): def run(self): env = self.state.document.settings.env name = None if self.arguments: m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0]) name = m.group(2).strip() if not name: name = self.arguments[0] lines = list(self.content) mangle_docstrings(env.app, objtype, name, None, None, lines) self.content = ViewList(lines, self.content.parent) return base_directive.run(self) return directive networkx-1.8.1/doc/sphinxext/customroles.py0000664000175000017500000000755112177456333021035 0ustar aricaric00000000000000""" Custom Roles """ from docutils import nodes, utils from docutils.parsers.rst import roles from sphinx import addnodes from sphinx.util import ws_re, caption_ref_re # http://www.doughellmann.com/articles/how-tos/sphinx-custom-roles/index.html def sample_role(name, rawtext, text, lineno, inliner, 
options={}, content=[]): """Custom role. Parameters ---------- name : str The name of the role, as used in the document. rawtext : str The markup, including the role declaration. text : str The text to be marked up by the role. lineno : int The line number where `rawtext` appears. inliner : Inliner The instance that called the role. options : dict Directive options for customizatoin. content : list The directive content for customization. Returns ------- nodes : list The list of nodes to insert into the document. msgs : list The list of system messages, perhaps an error message. """ pass ################## prefixed_roles = { # name: (prefix, baseuri) 'arxiv': ('arXiv:', 'http://arxiv.org/abs/'), 'doi': ('doi:', 'http://dx.doi.org/'), } no_text_roles = [ 'url', 'pdf', ] def prefixed_role(name, rawtext, text, lineno, inliner, options={}, content=[]): prefix, baseuri = prefixed_roles[name] uri = baseuri + text display = utils.unescape(text) node = nodes.literal(prefix, prefix) ref = nodes.reference(rawtext, display, refuri=uri, **options) node += ref # keep it in the 'literal' background return [node], [] def url_role(name, rawtext, text, lineno, inliner, options={}, content=[]): uri = text display = 'url' node = nodes.literal('', '') node += nodes.reference(rawtext, name, refuri=uri, **options) return [node], [] def trac_ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]): app = inliner.document.settings.env.app try: base = app.config.trac_url if not base: raise AttributeError except AttributeError, err: msg = 'trac_url configuration value is not set (%s)' raise ValueError(msg % str(err)) slash = '/' if base[-1] != '/' else '' prefix = 'ticket ' node = nodes.literal(prefix, prefix) display = utils.unescape(text) uri = base + slash + 'ticket/' + text node += nodes.reference(rawtext, display, refuri=uri, **options) return [node], [] def trac_changeset_role(name, rawtext, text, lineno, inliner, options={}, content=[]): app = inliner.document.settings.env.app try: base = app.config.trac_url if not base: raise AttributeError except AttributeError, err: msg = 'trac_url configuration value is not set (%s)' raise ValueError(msg % str(err)) slash = '/' if base[-1] != '/' else '' unescaped = utils.unescape(text) prefix = 'changeset ' node = nodes.literal(prefix, prefix) # Hard-coded for NetworkX if unescaped.endswith('networkx-svn-archive'): # Use the integer display = unescaped.split('/')[0] else: # hg: use the first 12 hash characters display = unescaped[:12] uri = base + slash + 'changeset/' + text node += nodes.reference(rawtext, display, refuri=uri, **options) return [node], [] active_roles = { 'arxiv': prefixed_role, 'doi': prefixed_role, 'pdf': url_role, 'url': url_role, 'ticket': trac_ticket_role, 'changeset': trac_changeset_role, } # Add a generic docstring. for role in active_roles.values(): role.__doc__ = sample_role.__doc__ def setup(app): for role, func in active_roles.iteritems(): roles.register_local_role(role, func) app.add_config_value('trac_url', None, 'env') networkx-1.8.1/doc/sphinxext/docscrape.py0000664000175000017500000003570512177456333020423 0ustar aricaric00000000000000"""Extract reference documentation from the NumPy source tree. """ import inspect import textwrap import re import pydoc from StringIO import StringIO from warnings import warn class Reader(object): """A line-based string reader. """ def __init__(self, data): """ Parameters ---------- data : str String with lines separated by '\n'. 
""" if isinstance(data,list): self._str = data else: self._str = data.split('\n') # store string as list of lines self.reset() def __getitem__(self, n): return self._str[n] def reset(self): self._l = 0 # current line nr def read(self): if not self.eof(): out = self[self._l] self._l += 1 return out else: return '' def seek_next_non_empty_line(self): for l in self[self._l:]: if l.strip(): break else: self._l += 1 def eof(self): return self._l >= len(self._str) def read_to_condition(self, condition_func): start = self._l for line in self[start:]: if condition_func(line): return self[start:self._l] self._l += 1 if self.eof(): return self[start:self._l+1] return [] def read_to_next_empty_line(self): self.seek_next_non_empty_line() def is_empty(line): return not line.strip() return self.read_to_condition(is_empty) def read_to_next_unindented_line(self): def is_unindented(line): return (line.strip() and (len(line.lstrip()) == len(line))) return self.read_to_condition(is_unindented) def peek(self,n=0): if self._l + n < len(self._str): return self[self._l + n] else: return '' def is_empty(self): return not ''.join(self._str).strip() class NumpyDocString(object): def __init__(self, docstring, config={}): docstring = textwrap.dedent(docstring).split('\n') self._doc = Reader(docstring) self._parsed_data = { 'Signature': '', 'Summary': [''], 'Extended Summary': [], 'Parameters': [], 'Returns': [], 'Raises': [], 'Warns': [], 'Other Parameters': [], 'Attributes': [], 'Methods': [], 'See Also': [], 'Notes': [], 'Warnings': [], 'References': '', 'Examples': '', 'index': {} } self._parse() def __getitem__(self,key): return self._parsed_data[key] def __setitem__(self,key,val): if not self._parsed_data.has_key(key): warn("Unknown section %s" % key) else: self._parsed_data[key] = val def _is_at_section(self): self._doc.seek_next_non_empty_line() if self._doc.eof(): return False l1 = self._doc.peek().strip() # e.g. Parameters if l1.startswith('.. 
index::'): return True l2 = self._doc.peek(1).strip() # ---------- or ========== return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1)) def _strip(self,doc): i = 0 j = 0 for i,line in enumerate(doc): if line.strip(): break for j,line in enumerate(doc[::-1]): if line.strip(): break return doc[i:len(doc)-j] def _read_to_next_section(self): section = self._doc.read_to_next_empty_line() while not self._is_at_section() and not self._doc.eof(): if not self._doc.peek(-1).strip(): # previous line was empty section += [''] section += self._doc.read_to_next_empty_line() return section def _read_sections(self): while not self._doc.eof(): data = self._read_to_next_section() name = data[0].strip() if name.startswith('..'): # index section yield name, data[1:] elif len(data) < 2: yield StopIteration else: yield name, self._strip(data[2:]) def _parse_param_list(self,content): r = Reader(content) params = [] while not r.eof(): header = r.read().strip() if ' : ' in header: arg_name, arg_type = header.split(' : ')[:2] else: arg_name, arg_type = header, '' desc = r.read_to_next_unindented_line() desc = dedent_lines(desc) params.append((arg_name,arg_type,desc)) return params _name_rgx = re.compile(r"^\s*(:(?P\w+):`(?P[a-zA-Z0-9_.-]+)`|" r" (?P[a-zA-Z0-9_.-]+))\s*", re.X) def _parse_see_also(self, content): """ func_name : Descriptive text continued text another_func_name : Descriptive text func_name1, func_name2, :meth:`func_name`, func_name3 """ items = [] def parse_item_name(text): """Match ':role:`name`' or 'name'""" m = self._name_rgx.match(text) if m: g = m.groups() if g[1] is None: return g[3], None else: return g[2], g[1] raise ValueError("%s is not a item name" % text) def push_item(name, rest): if not name: return name, role = parse_item_name(name) items.append((name, list(rest), role)) del rest[:] current_func = None rest = [] for line in content: if not line.strip(): continue m = self._name_rgx.match(line) if m and line[m.end():].strip().startswith(':'): push_item(current_func, rest) current_func, line = line[:m.end()], line[m.end():] rest = [line.split(':', 1)[1].strip()] if not rest[0]: rest = [] elif not line.startswith(' '): push_item(current_func, rest) current_func = None if ',' in line: for func in line.split(','): if func.strip(): push_item(func, []) elif line.strip(): current_func = line elif current_func is not None: rest.append(line.strip()) push_item(current_func, rest) return items def _parse_index(self, section, content): """ .. 
index: default :refguide: something, else, and more """ def strip_each_in(lst): return [s.strip() for s in lst] out = {} section = section.split('::') if len(section) > 1: out['default'] = strip_each_in(section[1].split(','))[0] for line in content: line = line.split(':') if len(line) > 2: out[line[1]] = strip_each_in(line[2].split(',')) return out def _parse_summary(self): """Grab signature (if given) and summary""" if self._is_at_section(): return summary = self._doc.read_to_next_empty_line() summary_str = " ".join([s.strip() for s in summary]).strip() if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str): self['Signature'] = summary_str if not self._is_at_section(): self['Summary'] = self._doc.read_to_next_empty_line() else: self['Summary'] = summary if not self._is_at_section(): self['Extended Summary'] = self._read_to_next_section() def _parse(self): self._doc.reset() self._parse_summary() for (section,content) in self._read_sections(): if not section.startswith('..'): section = ' '.join([s.capitalize() for s in section.split(' ')]) if section in ('Parameters', 'Returns', 'Raises', 'Warns', 'Other Parameters', 'Attributes', 'Methods'): self[section] = self._parse_param_list(content) elif section.startswith('.. index::'): self['index'] = self._parse_index(section, content) elif section == 'See Also': self['See Also'] = self._parse_see_also(content) else: self[section] = content # string conversion routines def _str_header(self, name, symbol='-'): return [name, len(name)*symbol] def _str_indent(self, doc, indent=4): out = [] for line in doc: out += [' '*indent + line] return out def _str_signature(self): if self['Signature']: return [self['Signature'].replace('*','\*')] + [''] else: return [''] def _str_summary(self): if self['Summary']: return self['Summary'] + [''] else: return [] def _str_extended_summary(self): if self['Extended Summary']: return self['Extended Summary'] + [''] else: return [] def _str_param_list(self, name): out = [] if self[name]: out += self._str_header(name) for param,param_type,desc in self[name]: out += ['%s : %s' % (param, param_type)] out += self._str_indent(desc) out += [''] return out def _str_section(self, name): out = [] if self[name]: out += self._str_header(name) out += self[name] out += [''] return out def _str_see_also(self, func_role): if not self['See Also']: return [] out = [] out += self._str_header("See Also") last_had_desc = True for func, desc, role in self['See Also']: if role: link = ':%s:`%s`' % (role, func) elif func_role: link = ':%s:`%s`' % (func_role, func) else: link = "`%s`_" % func if desc or last_had_desc: out += [''] out += [link] else: out[-1] += ", %s" % link if desc: out += self._str_indent([' '.join(desc)]) last_had_desc = True else: last_had_desc = False out += [''] return out def _str_index(self): idx = self['index'] out = [] out += ['.. 
index:: %s' % idx.get('default','')] for section, references in idx.iteritems(): if section == 'default': continue out += [' :%s: %s' % (section, ', '.join(references))] return out def __str__(self, func_role=''): out = [] out += self._str_signature() out += self._str_summary() out += self._str_extended_summary() for param_list in ('Parameters', 'Returns', 'Other Parameters', 'Raises', 'Warns'): out += self._str_param_list(param_list) out += self._str_section('Warnings') out += self._str_see_also(func_role) for s in ('Notes','References','Examples'): out += self._str_section(s) for param_list in ('Attributes', 'Methods'): out += self._str_param_list(param_list) out += self._str_index() return '\n'.join(out) def indent(str,indent=4): indent_str = ' '*indent if str is None: return indent_str lines = str.split('\n') return '\n'.join(indent_str + l for l in lines) def dedent_lines(lines): """Deindent a list of lines maximally""" return textwrap.dedent("\n".join(lines)).split("\n") def header(text, style='-'): return text + '\n' + style*len(text) + '\n' class FunctionDoc(NumpyDocString): def __init__(self, func, role='func', doc=None, config={}): self._f = func self._role = role # e.g. "func" or "meth" if doc is None: if func is None: raise ValueError("No function or docstring given") doc = inspect.getdoc(func) or '' NumpyDocString.__init__(self, doc) if not self['Signature'] and func is not None: func, func_name = self.get_func() try: # try to read signature argspec = inspect.getargspec(func) argspec = inspect.formatargspec(*argspec) argspec = argspec.replace('*','\*') signature = '%s%s' % (func_name, argspec) except TypeError, e: signature = '%s()' % func_name self['Signature'] = signature def get_func(self): func_name = getattr(self._f, '__name__', self.__class__.__name__) if inspect.isclass(self._f): func = getattr(self._f, '__call__', self._f.__init__) else: func = self._f return func, func_name def __str__(self): out = '' func, func_name = self.get_func() signature = self['Signature'].replace('*', '\*') roles = {'func': 'function', 'meth': 'method'} if self._role: if not roles.has_key(self._role): print "Warning: invalid role %s" % self._role out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''), func_name) out += super(FunctionDoc, self).__str__(func_role=self._role) return out class ClassDoc(NumpyDocString): def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc, config={}): if not inspect.isclass(cls) and cls is not None: raise ValueError("Expected a class or None, but got %r" % cls) self._cls = cls if modulename and not modulename.endswith('.'): modulename += '.' 
self._mod = modulename if doc is None: if cls is None: raise ValueError("No class or documentation string given") doc = pydoc.getdoc(cls) NumpyDocString.__init__(self, doc) if config.get('show_class_members', True): if not self['Methods']: self['Methods'] = [(name, '', '') for name in sorted(self.methods)] if not self['Attributes']: self['Attributes'] = [(name, '', '') for name in sorted(self.properties)] @property def methods(self): if self._cls is None: return [] return [name for name,func in inspect.getmembers(self._cls) if not name.startswith('_') and callable(func)] @property def properties(self): if self._cls is None: return [] return [name for name,func in inspect.getmembers(self._cls) if not name.startswith('_') and func is None] networkx-1.8.1/doc/sphinxext/LICENSE.txt0000664000175000017500000000302712177456333017721 0ustar aricaric00000000000000------------------------------------------------------------------------------- The files - numpydoc.py - autosummary.py - autosummary_generate.py - docscrape.py - docscrape_sphinx.py - phantom_import.py have the following license: Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. networkx-1.8.1/doc/make_gallery.py0000775000175000017500000000465512177456333017065 0ustar aricaric00000000000000#!/usr/bin/env python # generate a thumbnail gallery of examples template = """\ {%% extends "layout.html" %%} {%% set title = "Gallery" %%} {%% block body %%}

Click on any image to see source code


%s {%% endblock %%} """ link_template = """\ %s """ import os, glob, re, shutil, sys import matplotlib matplotlib.use("Agg") import matplotlib.pyplot import matplotlib.image from matplotlib.figure import Figure from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas examples_source_dir = '../examples/drawing' examples_dir = 'examples/drawing' template_dir = 'source/templates' static_dir = 'source/static/examples' pwd=os.getcwd() rows = [] if not os.path.exists(static_dir): os.makedirs(static_dir) os.chdir(examples_source_dir) all_examples=sorted(glob.glob("*.py")) # check for out of date examples stale_examples=[] for example in all_examples: png=example.replace('py','png') png_static=os.path.join(pwd,static_dir,png) if (not os.path.exists(png_static) or os.stat(png_static).st_mtime < os.stat(example).st_mtime): stale_examples.append(example) for example in stale_examples: print example, png=example.replace('py','png') matplotlib.pyplot.figure(figsize=(6,6)) stdout=sys.stdout sys.stdout=open('/dev/null','w') try: execfile(example) sys.stdout=stdout print " OK" except ImportError,strerr: sys.stdout=stdout sys.stdout.write(" FAIL: %s\n"%strerr) continue matplotlib.pyplot.clf() im=matplotlib.image.imread(png) fig = Figure(figsize=(2.5, 2.5)) canvas = FigureCanvas(fig) ax = fig.add_axes([0,0,1,1], aspect='auto', frameon=False, xticks=[], yticks =[]) # basename, ext = os.path.splitext(basename) ax.imshow(im, aspect='auto', resample=True, interpolation='bilinear') thumbfile=png.replace(".png","_thumb.png") fig.savefig(thumbfile) shutil.copy(thumbfile,os.path.join(pwd,static_dir,thumbfile)) shutil.copy(png,os.path.join(pwd,static_dir,png)) basename, ext = os.path.splitext(example) link = '%s/%s.html'%(examples_dir, basename) rows.append(link_template%(link, os.path.join('_static/examples',thumbfile), basename)) os.chdir(pwd) fh = open(os.path.join(template_dir,'gallery.html'), 'w') fh.write(template%'\n'.join(rows)) fh.close() networkx-1.8.1/doc/source/0000775000175000017500000000000012177457361015344 5ustar aricaric00000000000000networkx-1.8.1/doc/source/static/0000775000175000017500000000000012177457361016633 5ustar aricaric00000000000000networkx-1.8.1/doc/source/static/networkx.css0000664000175000017500000000125312177456333021225 0ustar aricaric00000000000000@import url("sphinxdoc.css"); div.admonition, div.warning { font-size: 1.00em; margin: 0em 0.em 0em 0; border: 0px; background-color: white; } div.admonition p.admonition-title, div.warning p.admonition-title { padding: 0.1em 0 0.1em 0.0em; border: 0px solid #86989B; color: black; border-bottom: 0px solid #86989B; background-color: white; } body { min-width: 740px; } div.bodywrapper { margin: 0 300px 0 0; } div.sphinxsidebar { width: 275px; } dt:target, .highlight { background-color: #E3EFF1; } table.citation td.label { font-size: small; width: 120px; /** will need to adjust based on largest label. 
**/ } networkx-1.8.1/doc/source/static/art1.png0000664000175000017500000020253312177456333020213 0ustar aricaric00000000000000
…hµZ$“IÇÁu]ÂÝ<þTs]W¬8Àa€$‘Ÿ›CÅu]ŠKKÂw]*å2‰DÃ0(‹¸®ËÐÐÑõ:“33$!Ó±õùy @äµZM|èCⓟü$¯¿í6®{Ñ‹p‡›öì!t‘ûóÉk÷Š…š¦166Æ…È>€?w]ìB¬ïcX­b±BÉ÷}Ñh4Ð B‰•|?•¢2sÕU´;LÛîdjš†Ã0ú¡kàŠ¢à çû¢R(PÚ»sq‘Ùb‘±‰ D½Î÷äо},7›T*â±S6ð+o~3[·o'‰¬ K8Ž(¾å-óy¾ùµ¯ñçï?oúå_&Ó] ¨ti£¦ižF I&“´Z-b±Žãô lOÏm%IBÖ4ªÍ&²¢à…BhšFuek¾ÏÔ5×°°°@0$›ÍX.š¦õóòP"Á¶={(>ú(•zZ½ÎçQ0 üqñîw¿›oûÛüæoþ&¿þë¿Nøy(e}1ÖétÖ–E:&~_5…À/ÂdEA‰Ç©Õëëu²ÓÓ§õP{7À²,!KŽç¡™&d™`(„ R4M–––Ø´iSÿán·Û}ž»Û­V7òyªOŸ]QUÚºNýØ1ÇŽá–ËDJ%J†ÁÚâ"˶M±Ó¡¹u+™íÛQ·mãõox;®½–¹'ptÀä$ñ¡!|ßÇŽÅ8þÄä…Ñp˜ÚÂV @°Ñ@²m*¥¾ë242ÂäŽ(²Ìw¿ùM®ß¹“Ñ]»°ŽÁÕuªšF4‘ J¥˜ŸŸG’$‰Ä9 .¾ï#­×"ˆuYgZ$BtÓ&‚õ:úŽ*I„…b«EÇqÖ¥¢º»ÓLïrÄ-ËâË÷ÞË·¿û]Æ ¢µ¯ºé&f¶oǶm.†ó¼ðÞBˆµ'P $Ë¢Õj‘ݶh4ú¬~·g ðÏWø>`ð’ÁéØ6¥£G 7ÔUqÅÏøs<=Tþ†ë²,¹®+òù<úY*Ó§~ÇqD(Â4MNœ8A¾T"2¬×‘j5 ùOùÐ!rÏð9y†ç4 «Ùd$™$•Ë3Jú‘¼Y¯Sê)p"Ï`ô®cÛÈí6!ßÇ5MìKPL9µŠ[Y]Ån4ˆ ÿÐÆÿzB…BV«%b±ØYß³W€ …ÃÜ÷Å/RùèG‰Kß¹ì2.K&iÔjá0¹ MM±ilŒ=¿ð dÇÇ)®­‘ÐuÏ#­(F¿W-Ÿ²'ÌBœ8tˆ¤ça©*5Ues4JFQh Óêtˆ©*n0H$%™N“#=>Nê'~‚ûÿéŸPTÉ0È\uV£ÁÒÊ £’„»¸HÑóصk×ùZ9t zƒALÓÄê¶rÆÇÇ©T«$ \×娱cý_  T.sððaR©AUåóŸÿ<×^{-ããã|ñ‹_äõ·ßζ©) {÷²-›Åj4¨‹Ïû1O˲D!Ÿ'šÉxÑ‹žGfdä’~<#€{¾/V!iHžGme…äÆõžn¡H)›eéÄ üh”ÍCC—ü9Õ*ö‰Äe™z­†võÕç{E»ÙDQ"—¨ü17?/¾öÅ/’H§yõÏþ,ÉdR …BR³ÙµZí¼Ì+!Â÷©8Áï´Û¤·ÍÎ~õ«™¾á¤ÑQÒããëm©tšd6‹Ñnóø“O"7›lÐ4´tšH0ˆ8eȤðN§CãäI6«*^$²þ~™ ³ss„6l@²m÷í#9õá¸óNÚŠÂÑ7¾‘;ÞùNìNç´^yèâº.r0HµÑ ã8hccŒ^vN(ĆÉIêÞÉ“LoØ€ø33T×ÖÈNL …Á W¾øÅÜóè£xò3‘™p˜“'Orïw¿K`jŠŸ|å+Ï™GÙ·¡FUZ‹‹”…h$Ò¯7Œtç¹{d]× Ô ÌàO[-iµø³'¸ç3Ÿ¡fY¼õW~…Í›7Óétß°l6˱G!å8Œ¬=õ‘ç¹Åu]Q©T0M“L&ó¬Òžu€÷4¼ëªJ ^‡g¸»«òµµµuIÝK8 ëÅ"•ÙY*µßgfëV"ç8$\ÏéTȲ®þa”Ëç¤Bž‘Š´Z´÷ïçõÀS¦ÉÞuË• rÌBˆ¾òÊÓ¹ÚŽi’(•ˆÁN‡¯MN‡Ù81Á¾ãÇi<úh¿M$„ òûï'ÿÅ/ò÷à/Nœ@ëæñ=^ø©«|mËbdëVZå2n§C"—#M$Ö«áCCØá0uÏcxhˆôè(J(D,£]*1‘N“»ì2$Iâ;=Æ×WVxéM7ñï?ÎUßúB’øÇ¡!ìNGŸ¥ßÇõ}‚¡AY¦X(°T.sùu×a‘H„P·8©( Š¢Ðh4ƒDÃaÄ®]|ø«_¥¼áØ1#ûÀîÝ»ééÒÕëu×%œÉ l›å• ×¥ÒnµDôéÑs-$ÏçóȲÌèèèµÍ÷ŒsðÞ‡ô}_ êõú3ú;= ä…t¹O¬ãÐ\\dÌu‰§Ó,* Éññs†Ýª¢ ¦RTNž¤Ôl2Ô}¼¨Ze±”Ëñ¾B/“á]ôG¼â–[N‹:ù|¾ÏÐêy×v³IýàAd]gÅ4aß>nyÕ«˜ž™A’¤þÄY³ÙD×uvìØÁµ×]ÇšÆßìcT}#Dë2Ûz­S=¸mÛ$’I‚á0n—”r*5’L¢LOã ‘Þ´i}Üð=EÓhùJ…ÜЯܳ‡oïßÏç}”V>Ïÿ\!øÀÁƒ´Ï¢Dã A,—£íû”K%B­ÙF±ºJ3eÓæÍ§ŸÜ¿_èºN8&›Írým·ñ7å2›ŸzŠßo4øw w© IDATàyt§O¾ ƒÄb1â±'r>ÏããÐhP/ž³¡¸¤®C95$ÿa‹B~ßm2Y–%Û¶E>ŸÇ0 ÑcQ]Êë«Õª0/aE¯ãº˜¦IU×qe™Èú¢¹ó"R§Ó­tšçѶ,ZÍæß§T*‰~ðƒüü[ßÊUW_M<eËÌÌYÇ[­–¨Õj„ÃáÓ¿X$‚©ë¨¶·o[¶mczzšR©Ä‹_üböîÝ‹a4Z-rÃà çrüÆ{ß˧7nd2`å;ßáž»ïæ†nèë¹õX]]% aÛöY%I"“ì2Èz@<“¡½u+–ã0S9z”-7ÞÈ~éKœ,y_*…WUgéøž‡ ݲ-™DôQ’Ñ(~§C0B’åÓreßuY^\DU"‘kkküôOÿ4ÍË.ã]_þ2×_Ξd’µÅEF&&úl?Ã0PU•ì† (­m]§¶°€æyÔËe‘ÈdžU-tÏó„ešÈÝæRBiÛ²D~vDz‘R ðC ÉŸu€ƒA©V«‰jµúŒ6€„ÃaÚíöE½Ö2MQ›êjOôñÔÉ1€D6KfãFFFFxri‰o>ð?ûÚ×Ò±m¾·?ï|ç;9~ø0õÁò⢈§RŸ›c$—C¯V©./I&‰&‘e‘AE!¹²Âj£Avf¦×uûï»Ú¾}xõ:j ÀÜÜ;wíâx³É•¿ð \71AÜ4i8@@QÈMLèέ÷j<ÑéiæöîeTÓØ iè‹‹DS©g žï‹òÒöÉ“ˆ@€Ô%Ôk|!Dq~ðšMjá0;^ö2.Õé=çô ÎÕ¾@.Þ¼µt­V#‹­‡öé4¡pø’6_F£Q©Ñhˆr¹ŒeY"ô´ÁùF£!>üá“Ëå¸ýöÛÑ4Mr]W´táûÄR©ÓZ®ëŠüñãDòy ºÎII:Ñj6ÅÑo›àê*ÖþýTuÐÔáÉI Ë¢öµ¯ñ‘…ö-,ðo™ W\y%áîw ‡Ãüöoÿ6ögƾð^ûÚ×ög«-ËêsċŢxº{:è{Sc=Uß÷AZ­{~˜ÏüË¿¦^ÿznú©Ÿâß¾ðøÆ7øo{öà=ÊGþà0m›àý÷Óܼ™Ÿ~Ë[¸nÃj++äÓiB##$“I­#ŽƒU.SÖ4\Ï•åe†„`:›Et%¥u]' ªŠE®ºñF¬f“t0H6¦U­ö7¨öEe™x2Éø–-„òyjµÕv›ä)©À¹Bf×óº)áùž×q0×ÖHw:ø®K³P¸¤Š½ÑncW« ÅbH©ÔY'ðž—W¥ß6MS\Ê0€ªª}QÁ …N¥µ5tÇA¯TðÓi‚¡PæøT­² ­¨ÅbxžG±X<­ÍÕn·Åßþíߢi¿ök¿F(’|!Dyy™Îì,’X7žV¤s]¯Z%%IMcue…ìø8^Wfgî‘GhìÛGÆ0©Ó¡¾¶Fæ²ËȦIHß+0 MÓˆÇã}çwÜÁÇ>ö1„¼ñoìïKt÷š«E×õì]›À{ÿ?;7Çg?ùIÚKK\¾¸Èí¶Íü÷ÿΉw¼ƒééi~øaþú¡‡¸f|œÇ¾ó&ù pg¥Â÷6mb÷­·ÒZ\dÅu MN’šš"(IVVp<C×Á² ZšÆû¥_B›˜ *ÇOœ@–e6mÚÄ+^ûZª'OÒ:~œv·5hv¹=1Œ61>ÎÉry™‰‰ êO=…mÛâ\#”j}~$‰T7¢8_MÈ X*•Hd³„b1¸ˆèß÷}ÑÐuUEšœÄVU7¾pë\ìZ­&ÎEã¼ÀkiŸ²5ãl°´´Dgi ×óh§Ó䦧‰Æã§ EôzÞ•ÕU\Ë¢ÝlŠh<.-÷ï‘ý»cŒÂu]þþïÿÓ4ùßù¢Ñ¨äzž¨‹¬;Æ”$áØ6­rzú¿.b €šÍR\X bYd·mcáØ1¢BÐn6‰·Ûë«}ŸZwø0Od2 oÞÌÑbÚm2‘ÍbÙóPGFÈ7›ÈºÎ¦h¹^g^×É^}5C‰‰TŠ'¾øEFGGY\\ä¯x[6ofYÓ7o&c¶ZóóëóòSSD²Ùõƒ´+*¡Åãh©1M£T«¡×ëèºÞ—˜îýs=…'Ÿ$gÄâqôååÓ”\Ïò\ f2hÁ ‘l–d6{ÁüÞó¼~ lrzš€¦!ºëºWÿ¬/Ï4Tïñ‘ÏÕËv]«;²Ó4jé4r €ªª†qÚB€Z¡€sâA!(×ëç$¢È²,yž'Êå2ù|žÿ÷§P(ð{¿÷{$ Éóí‰ ¶¼æ5!È 
S©TxÿûßO0çMÿûóò—¿œcGŽð™Ï|†¹¹9n½õÖþF&“allŒÛo¿;ï¼Ó4ù¹Ÿû¹õ§ÙÍ®âêÓ=xOË­W„T………î¾ûndYæW~åWª*Üww'*—Y’ev¢Ýf÷5×ð‹¿ø‹œ\Xà‰ýûÙ½k_½çŠõ:/½öZ†GG™øaªå2Éh”ðò2•`É]»h/.R]\ÄñLzûvÉ$¾4›Mî¼óN:ÄÛÞö6¦6nÄ6MvìØÁ›ßüf¾ò¹Ïññ÷½=?ó3D¯¹#&‰Ëåø­ßú-Þ÷?ÿ'²ëòÚë¯G‚z(ÄÈÔÔ\’$‚Á õFƒj½ÎÁ‡â ÷ÜÃõ×_ÏM7ÝD>Ÿgÿ¡C|ëÀZ##´_ô"v ^¯óÿÞ~;?uË-R»ÝFƒW^ÉÝŸø{®¸‚x€/}úÓ¼í=ï¡™J!€É\ޏ,S¨Õˆ†ÃØÙ,åv›Òì,W ­T() µz]ü¯÷½Çqصkñxœ••r¹Üú¡ëû¢ÖnS9z”‰P?'’<¥˜&IîÔžë …Î Æv«%Ò¡r*…áyÄâñsÎMxž'òù<‰Dâ¢Àmš¦Èçó„B¡žzës®'ÿIÂá°T©TÄùöÍ›–Ëå³nþèt:¢øÔSxõ:F0ˆ<:J0™DUUZ­òz;¦ï¥##”ëuê†A0?ï.,ÏóÄÝwßÍìì,¿ú«¿Ú#XY–¥N»M<¡ìºÌ•Ëø®‹é8Œ£ë:…Bá4÷6 7›,Õë¸ããˆp˜«®¹†Ðc‚Žm£ƒÜyçìÝ»—={öpë­·R.—Y^^fË–-¨ŠÂÍW_ÍŠ,ó­/|b³Éõ/})ÓÓÓ˜¦IÔóxí®]<øÿÁt«ÅËöì¡ØhàoÜxšwêmBY˜Ÿçß?ñ ŒFwd„WÜ|3–eñÑ~”R©D0¤\.ó§ò'LMMa{÷íãÁGÁ²,aŠ¢ðò—¿_×ù×|-!64ÄGþæoصs'7\=•æªU‚7"É2’¢™Á3 Ó¤U«QªVY\X ¿¶†$ËÜxãôÚ‹]ñ Q«Õ£QBcc„¢QÔX ³Ñ`(“9+8}ßF«%\×=cõ±eš´Z-&Òi¢ë-Ìs> ݱ?w¾|»Õj­ïaK$N“‰þ±8@*•bmm ½Ñè \è"„Ãá³nñl7Èù<d™šHÃÆA&“é+‚¸®Ûx$c|×.2Û·S©VÏø¾/¾üå/óÐCñîw¿›±±1ÖÖÖh7›¬-- dwí¢3‹!b1ùÒ—ÈML°óÆ ‡Ã§yr«Õ"ä8ÄB!jÁ éíÛÝÉ)£Ýfhh_¾ùÍoràÀr¹?ÿó?ÏÔÔ”T,ÅÁƒ)–JD"жM2›åe7ß̃ Ìö³üúwÐ1M´Zí™ c¯~5wùË”Úm®~õ«×;Ó<¸eÛÜ÷™ÏpÛW¿Jøÿ¦¦xbd„­[¶ðò—¿œ`0È<@,cçÎëS^¡·½á üÉŸþ)w}þó¼ò¯X/ʲÔnµD½ÑàÃù#¥³°º°Àž={ˆlߎª(LLNÒj6ûu‘Ȇ ÔK%:ù<3Ù,ßyì1Ìj•m»w366F³Ùdtt´×ž¤ÑhH¥ð·m£^©àÖë$žzеuÈ3F£V£~äŠëÒïë×麎ÞhܾÓ0Ð’I]5›³ô«««$ׇt>‡P«ÕÖÇ;³Ùž”Õs–M÷ÛÝ¢(Š”L&™?p€ÅÇ£¼¼Ü¿ðçë%Ÿ­]¦xªJ¥Ñ ÔhÅP»RC=ÙŸnq Ïó„,Ë’¦iR<‘’ÉdÿÐxúû}ýë_ç¾ûîãï|'SSS’¦i’¦ª<ñùÏsüÿ‘Åo~“äÖ­ìþÅ_dÏk_Ëötšc?õ÷ß{/º®£ªjOéTDS)Œh”UÃÀK&ûÈ=Ͷt:ÍO<Á}÷ÝÇÖ­[Éd2ÜtÓMd³Y6lØ@¥\Æit”yIbÓž=¼÷ÿ˜x,ÆGþú¯™[\ÄÖ4Ší6‰©)Þø‡È#¥>ܧƒöríB¡À?ÿó?sèÐ!Ý›==5Åo¼ýíÜ~ûí\sÍ5 :ÄW\Ñ/` Ï£¹¶Æ-×]Ç7ï½—‡zè¿V Á7ßÌ®n ò¥/ñ‘…ÿùŸüÙûßOÛ0Êdúu‘Þ¡+yÑpÃqÐuÙ¹9»¯¹Ó²ˆÇãhš&Õëõ>#2‹qùõןš"‹áÏͱøÀ¬-.Ò6 ¡ëº¨T*¢X,ŠÅ#Gp 2’„[,Òî²Ç Ã`td„™;ذ{7#›7Ÿ3Gn6›H’tÖ ¥–iŠÒÊŠ(çó½.£££D£Qé¹>ðòñà¾ï _ÊkkH++HŠB»Ý&œL^(—ÖÖÖÄ©´U×uE½\ÆŠÇ‘b1²Œ+©d’R©D,[èïî {z¾H$h·ÛýÕ¼={ðÁÅg?ûYÞñŽw°uëÖþM*ÌΓOét¨ÖjT6oftãF\ÇA E"LnÜH3›åøñãH¾O2¥Õh°iófF®ºŠèô4¡®Ý|»W@ìUÀyä^÷º×õI½5?NÓ0Š‚Ëdáõ·ÝÆ·x€ýô§yÕ«^ÅÖË.C›˜àª;§Ó|àX_p°uËò++Ü{ï½ÌÌÌðï}/õ—IXUù?ø²™ ~·ÿýÐCñÒ—¾”t:mÛD£QZ• B+c1ê;vð¹ùvîÜÙ/B Ça(`¸¸<â $ñ©O}Šw½ë]ý¢ž„ë®× µÛh®ËA]ç?——QC!†â‰l½æÊå²Èçóë »nZÕj·1„àè“O2bÄ'&X9x·Ë0SºÔè(®a ;-YÆ«TH&“ôƧ>c«««¤Óé3¨¤®ëе#GVW)5„·ogËå—?o–*¨ Ø·öé'Voû¥çyضÝçe{ž‡^© (²L©RAYó\œÁ£×Ë.Ÿ<‰3;K\ÜÑQƦ§×yÏÁ Á`°¿é£Wl{:ÀE‘z̵^(ý裊O|âÜqÇ\yå•ýÏÒq±÷®»zùBŽƒÖj±º²B0D¦&F(Ä×]GK×9ðÿA©Xd9‘À°,vìØAndDªT*¢Ñh`š&¢[”»û馛ÐuP(ÄŽ;Îøî©TŠd2Éüñã¬>ñ•éiô\ŽÆâ"[B““ìû˜¿öZn{ãiµZ\yå•ÜòêWówïy7W«|-£uýõÜöº×±9•B_]eÇô4#›6±id„âÁƒ„ÓixôQ\×e÷îÝô>ëðð0V8L³ÙÄ\[c[4ÊMüã_ÿ5•JEÔëuÄê*›>žHð¸ç‘{ÍkxÇ[ßÊþýûù»¿û;ÞðK¿Äpw A<™déȨTð;L×e­Ze÷ä$±|³Zå1ÓÄívDº…¶uõßÇ–$܉ 4 •ËQ «*™l¶2ÏóD5£°¶F8gtt”ÐúÔ‹a£Ñ@UÕ³.vèt:Ô——²,rá0hò9Ômž—!ºÓéˆÒÒkGÒêêsw:ÑjµD¥R…Bµµ5ºâ!ˆÅb ³udž¯¹iÓ&F¯¹5`mu•Z­&\×çj—9Žƒã8BÝ]€aÐ. ¨*CCCT*"‘¶m£ª*žçõ^wÖC#£ë:û÷ïŸøÄ'xÓ›Þĵ×^+zP5ëu<Ó$¹y3Òe—ºürZ®Kï;6L‘N£¤Rë‹ò‡ÓÓ\53ƒjY9tˆGöî¥T*‰h4Úo[™–ÅìÜ/zÑ‹¸öÚkyä‘G¸õÖ[{’8•ðÓÓ@›ŽFÉ™&Êê*Ë?NÎ4Éê:Ûs90 N~èC|ämoãË_ù ÿñÕ¯rôÄ vÕj¼·Ýæµ…£ŠB¤\Æ8ruu•Û†b‘¹¯~•H¹Œ~ø0_ÿÊW¸lÓ&lÓ$ bÛ6‘H„¡‘J‘I"œLrËK^BP>þÿ€Óé ø>ãCCl˜™Á|éKyÝÛßN*•â'o¸ šÆ'?ô! 
ù<Ùl–Žëâår¬É2Š$‘1 ܵ5RÁ r¡€»²B~i‰Ñ±1®¾új¶nÝÚó¤t:TEá·ÞÊô+_IUQH{îñãT×Öú÷­Ýncv:ŒnÜÈÔô4‘Hä¢CgÇqD³Ù$•J±À°ÑhˆR©„ŸJщÇñ2b¹Ï'ɨ zðz±ˆ77G Óavq‘dw¡ž,Ëhš¶¾Ï*ìÏûžÕÃwwGw[ ÔëuZëËúDôi;¢{ÜãN§C4•j¥’¨é:¦ªbts»D"mÛ4›Íõ…yÝÞqœ~«ìéz×uÅÃ?ÌwÞÉm·ÝÆž={¤§µSh=ʨ¦‘Fݹ“Ý×^KÃ4ñ»* ½±Nß÷9|ø0ÑH¿TB5M´ÑQÆ6o¦P(°wï^¶oߎ$ËTk5>÷éOÓ,¸îºëxôÑG™˜˜`óæÍgM)b±µZx2ÉΫ¯F ñº)‰•ɰ÷À"û÷ó·Ÿ}ôQ>ø›¿Éë€y!8.+ËìÙ,Ñ'¨ÅFƒr>Ïm#‹Ìåó쟟çòh”Í@cvet”d*E(¢Óé#•ÍR˜% òæ·¿ô£<úÝï²çºëXµml×%ÔåX¦‰Z,òê+®à[†ÁçþùŸ‰'“LMN225…ÙéX]åÑûï' ‘Ìf©( ®¢ ‡é*²V*•þAÓ• &‰Ðt]|!ˆ( !E¡ÙnÓqQ*•°m›ô3+Ôu`0ØWõ}_XÝuT¾ï“N§Ç2 U=¯øÅóàžãY&àû ¥Rĺjù¾/z…ÖÉýÊ0 êõ:FƒV«%z7GQ©X, Ó4ét:¢¥ëD6o&‰¬f} …TÏ‹ž–w:³`yy™Ï~ö³¼ä%/áe/{Ù§øÜþýtfgÉ$èÃÃÈø²LPÓ¨V«är¹~>ou C‘H„øØÍF[×)u+àù|ž»îº‹¹Çg~nŽëäwm›üîï"oÛÆ[ßòLËBîVØOK)T×4)ë:eUÅRU6…B, Ô;##Ãa¾nì“$ftßþæE/bibùØ1¶Ê2ÕåeÔD‚±D‚F2‰êººÎìÑ£”uk®¾šÖÒ^0ˆ( š&æ®]xžG@QE£ˆîöÔ±ñqn»í6>õOÿDHÓØ81Á,•ˬ--±qf†F»M³\æÚk¯¥xäÿøÇùýw¿›b±ˆ ²àû|gq‘«¶láʱ1 ºN2•bJ–Yzä›6‘ÉåÂqÖÃæ.Y'55Ea~«P Ë´„ •N÷7’^êÃß›‚F–eéTMòx<~Ñýðç5À“ÃÔ „i2¼a‰‹T?õ…z¹LcqIUIoÚtZ^Üz«Õ¢Z­Òh4ú㦑H„jµŠ™Ï£”J8ªÊÿÏÞ›GÉq–gß¿ª®Þ÷}öMšÑn-¶…eŒ°-Ûø3«1˜7@XLá ðå%˜ 81¼!óçÀ›I0›1ȲÀ›,Y¶lY»F£Ù÷é}ï®®®êªúþ˜ž>Æ K€ó%à:GÿH­9=]}?ÏS÷}]¿Ë¶y3þpXh4f&“i'dd³Ùvxžªªä2’³³¦ÝíÆc±X„¥¥%óóŸÿ<×]wW^y%•gYE‹©ŽlUQ˜-— _~9½[¶ µ ‡ƒ\.G´õåSU‹ÅÂüüüênçõÒëó‘ËåVõá0÷üýßó¶“'y¸Ø|ÿÔ)v_uõ™&VVˆ®[÷-y½VCL&‰7d›MrÓÓl•$,V+Z½N8¢pÍ5Üyø0’͆×ïçΉ ÆD‘}}ìííåðÄ+µUM£3ÅãtRª²LP×YÌåØÜÓƒšÉh4y½e§®3û䓸°ÙlT*‚¡PMÓàÖ[oåÿÜy'oÙ»ŸÍ†_Q¨MM¡ÄãäEÝj¥SUù½½{¹ç‰'øÈí·sÃ7²sÇL‡\£Á-6à²Û}> «‡®£W«h²LCUQ¥½‹wvtPZ\D«×iÔë4d™Ù'p¹ÝDvì ç¯øO³ÌÖ¼é-ࣙH$p:tvvþVDÿBnw¹èÚ²eÕ´ I¿0=Uo6)ÏϪVQu¢ÍöŠÊZ¡7›M³P(L&I¦RfSÓHÎÎ"&“ ‡Ãä ÒKKm;éþ&ŸÏãv»ÛùZFƒ•³g‰utP±X¶n%•N›Ÿ»ã.»ì2n¾ùfTU] †o›4MÃiµbëî&_.C ÐF ÙívÂáp{Yë´†a†B¡vrf­V#‹Q­Vih!»u@ ø¼ÝNXÓhD£ jÖcÇP-òµÖü÷™Ÿ›C’umi ¿ÍÆÊÙ³”5B¥‚¬ªÄ½^vÅã4ív6^{-É[n!uàû"ò¥iY†j•‘K/E7 JªJ¹ÙÄ‹‘H§)®în U¥¹²‚Y¯£«*ξ>¤J¹ZÅf·“Ëå°ÙlX[Fz³Éºxœ‘ÎN¾ýÈ#ô¢ÇƒM–™>{Ow7=ë×Ó­($ÇÆè«ÕH+ >ð¥|ž§ï¿Ÿ|2ɹtMqy<(šFÖçÃå÷…øýí_8¦’Í".-a¦¦V³¹X?0@-™¤ö¬éÈ/Ò0®Ëò*4´P ‰H$°X,Äb±ßºtÒŸ[àÿ醂 Hr£A¡\ÆÕÕÅ ý zµJ}qSU©,%“4&'k5fd{g'I"‘HÉdÌf³I$¡T*±²°@=F6 äFK©„%ATUR++|ýî»Ù¸q#·Þzk{AÉd2f¡P@7 ³’Ï“™¥R«áÙ¸};þX¬êY­VÛ#®|>O­V£R©˜š¦á÷ûÉf³«AzÑ(²,“/˜››£ïª«¸#•"•ϳ骫˜={–ëûúÇÆÈjI¢îtâ[]<Û·ÏG­·—é‰ ²¹#±©±ž¡™ÑQ¶lÛÆèì,¿ŸéTŠ7½ûÝX¦§ILLŽÇñ Q)—©5BÑ(y—‹¼ª’ÍçÙ»o0»°@CUiøýä4 ¡XDYZ"ÚÕ…h³µ5Ý¢("×ëÔóyĹ9nÙ±ƒÿ³´ÄùBr9B““j5¢H^/G{ ÏÜ!‡—oÙÂÓÕ*?ó>Ïó4ðh¥Â—]ÆŸÝb!A´Ù( kŸ1¢(’Ëf)ÎÏóxÈ4ä³Y¼.ªÇƒ*I˜¿ä×R®VÉŽŽRœ›#g± ]vÑhôWŽúšƒX%I¨–JfÁj¥Q,âñxž÷ÔuÝ\¹p_¹ŒÙlR´Xð9D#—‹%A g`€p$‚(Š(ŠB*•Âjµb•$„L_.GµX¤ìp +šFZ–9øõ¯304ôœDŒ5•]±P yžtk IDAT:=M¬eV£Q‚XZ D¯×+N_ ÀÑ'Ÿ¤b±Ðp<‘ C–m62Ù, ªÊ‰ñqölßNO?©d’pW"P/—éîîÆ!Š© GŽàÚ¼Ëꨩ­1l&ƒÅ4ÙwÍ5|á«_Å55ÅÓå2WîÛ‡šÍR‡±vtàš++¨»¯ºŠìDóy¢ÀP?]ÃÃÔWVhÊ2^¿w뤴&±Åbx½ÞÕ/© @³‰'b(Gª×)ÎÌà²Z‘çæ¨‹¦7ø¹Ž/­Ùdif†Æâ"YFšªÚöÖ«ªj®5~[Šý7jVuù|87m"¦i$“Éçgˆ ‚ÕºzŒ/ÐÝn"Ȇ­Ùds_®@€F+¼o­¡V¯×ÉT*È…1Qd-ÉòôسÉ$Éåæ›o^u¢)Šù O±P*•Ìé±1Œ¹9ìÍFƒj±ˆ”L¶-–©TÊ, X­Vl6>Ÿ¡¡¡¶»Ëb±àp8XZ^^ÿe³è33øw:Öhà×4ÎŒŽ2ÐÝM©Z%Ö×GSU±IvMC‘eš-%^½V#}æ –ÑQ:Êeª@ZU‘"ò-=ø@W©•‹…þpK(Äу¹!!ìp þX õâEºc1Âã33X¬V6ƒ##L..ÒÐuìÙj› àŠD0Ëeš¥Þ\M(wuaAUU¬’„ærQôùPªUjN'Ói>cI&ùq2ɾ;õ÷S‡™:t¡¥ÛÏ&¤††øa6MÞíµtoÝJÙéÄ[* …(d³H==í¸a¿ßO¹\^õ‹oÙÂØÅ‹¬ß¸‘Ea¾^'nšŠB%“y~±•a ª*²,“H$Vɵv;Z$‚7ÀîõâÛ'5Ã0ÚvÚ|>oÚíö¶ jí +W«(²ŒÛëÅîrý—™ýF ü™¿|©T2 …Âs°LQê²læEK$‚a³as»ñîØišØ[‚Ã0Ì5°ƒªªíÈ›d­ÆÜÂXŒÙGÅýïÿ޵Ñ`q×.n½ãŽUÁM©ÔîV ‚@2™43‰òÄÎzéJ…ØÎŒlÚ´:–jÍÔUU¥ÙlÒh4Ú0CA¨ÕjLMMÑÕÕ…ÓéÄïõRš™ÁR,2{ê¶|I×Qm6,€G׉Äã,”J$ \.좈,ŠD[#¿–æ½Z¥P,R)I»Ý8N6ø|ä‹E €hšèv;ªa`ˆ"—nÜÈÅï}F €+c"Ÿ'dš Š,LNRéêÂ"Iä ¶D"ô‡BËe"~?Ùz‚,³chÍÆéj­Æ@(Dna|Ù™þŸ×½Žf³Éèè(³³³LNMQ,•@™V-W_Íν{ɤÓá0®¡!ôFO¥Â£<À†uëxóç>Ç…óç9xð ¡h”l&C§,cš&¥rïà õF_+x±\. 
™žž^MP] Ø Z­R3 ŠªŠ$I8Z•µ¢V…l6Ûþ\ív;ÖPE”xE–‰y<8œÎŸún­Ýïµ…aÍ‘g³ÙPyj Ÿi’‰ÿ7HYùø³‰*kt’Â*ü§ N—KX|Ôëu²Ù,Á`ð§ŽõÏ|½a¦Óél›+ƒƒä Jÿöoü‘¢0 ¬T*Ä;:Úꨵùu³ÙDk6)g2„Ü.)]§érQ,—±J’$µižÏd ­Õý~?+++¤3|>Å\ŽÜø8®Z ±ˆh±0[¯£Öj¤*ôÖóbEÓÈZ,øìvêå2º¦1è÷·‘ÑV»²,cEr’„÷xh( Ó$ït¢—JxNÒÍ&¶žzˆÆbŒãê줚N3vþ<‡ƒ’,c˜&Ë‹‹L%“¬Û¸‘D©„¦ª˜ºŽ!’„-@±ÙèëíÅL§™J§yâÐ!nY\dúá‡ùÐCïïÇívÇÙwíµ„B!þUùÀÉ“Üü{¿Ç;n»m5fyÕŸNÇà gfX8~{©ÄæH„ˆÏǾk¯eaaÏýþïsI¹Ìòð0o}ÛÛØ2wލ$¡8˜ é|G4Цª4-bN'UŨ×QÝnº:;Û¿—af&‘YÆà‘eÌF§ÇÃÅ|žB­FO €^­â‹D´Æ;®@€¾uëxúÜ9v ²gÓ&ÎŒŽ‚ÍF6—CµÛ)--1ÔÙ‰bÌÉ2n7Õb‘ÙJ…¤ibTYFQU"á0iEA,—y9àSUNÛí¼ÿƒÄÓJm4œ?ž‹ãã\÷ªWñÇögÃa!›ÍškÒ\ÉjňÅ8¹¼Ì+Ö¯§Û4¹pè[¯¿žx0Èk Þ­ë|ìÂs9.Û´‰¹•:ûûÉ·Yך›]]]d³Yt]çýûýìgUϾ}ܸar6K-›¥ó²ËˆÅbkÇê_éèü|E_­T̬,S¬×ÑöÉáwªÀuÃ0ØVV0M“l£ÑÙív¡T*ýLúªªifâÌú ƒùãÇÉåót #´ Íjµâr¹°ÙlØl6êõ:ßÿþ÷9~ü8ö‘P(•øÆ7¾Á>üa¼O;^HÓ4s­ÑÖh4ÌÔÌ zOw7}}?%hxæê½Vôk]õµÂ7 ƒüô4ŽL­\FL§ ûýdTÅ0°:¤–—Ù³gÅzSgÏâÌ@Ñj%£(„­VôÖx¯ÑhP_Y!ÒŠN tE£x% ›¦aÍdÙí”4ùÅE š†ÅjE)—ÙÔ×njñ°i3Õ*•r¯ÕJG¥B(B¬Õhj¾z» `:œ2B®P@v8pÛl”ívþÜå¢ ò–Ûnc ¯¯=ç/ <ð“Ÿ0äó±Y’¨MNRÌçÍâªh©-ö9tô(ÑË/GZ n˜š"7<Œ7â¡PˆÉdHrùºuœŸ™AÕ4ò““Ø»»qº\ídµZÅçó!X,œ}ðAÞŸLâ>øØc\ÑÙIφ H‘`g4­=%·›Ø¶m4—ÛÍ 1à~« Ü4M EÁ!ŠEd›ø3г^¯—Z­ö³ƒZ)Á@€´ªðûÛ9ÕÏ\YçææÌ¯}íkH’ć?üazzz„L&c>t¨mìw»ÝT*•Ÿr“%çæÈ:„C×Éd³„ž5‹~¡ãÜ3 _UUꥹ9’gÎàu8(+ %M#Ö×ÇìĢ׋3 kp'ÆÆ(( ÖÙYzúú°û|X¼^”F£½+V‹E,ºN p…Ã,—Jäëuºý~ôL†l.GÑí&'I”R)$«Qp‡ÃtõöòôÔ§§Ù–JñÆFƒÒn7±þ~æÎ[ %(™¬V™Y^F<{–aÃàþûïÇÒlrÉæÍˆŠÂD:ÍË{{qŒ`Û°—½ìeÔjµöQõĉXD‘ „ìvlŠB!›¥Úh´¹iÄn³qÝÍ7³rü8¶ÅEüV+K§N±qï^¾°nú 7ðž|€¾în&>ŸÜÊ ƒH6µZîVtQ£ÑàÀý÷s~y™»% [³IlÏ¢×]GÙfÃa·c¾îë×ÖSúoØYÿµ¸d±…tÚÌ+  {<ž±cÿ¼ ÉbÁ?8Hqqw4Êú`b±¸v4n c>ÌwÜÁË^ö2^óš×´m—‡U:ÇÚMYCH)ŠbÚìvÆ&X¯C­ÆR£AÿUWµ»ì?Kõì>€év³’L"9,V*XD‘ŽžRÉ$g&&¸òê«ÉÉ2…R ¯ÕŠ[×Qj5”f»ÇC ÕÁ-‹¸$ ›¢N&©g³lêéaÒnGñûyêôiÆ&&èÞ¸‘Îþ~\Í&Âø8‹…@|˜žž‚>©TŠS§O³÷e/cáÌ–+——Ñ…ªÅB(æé§Ÿf||œÛn» ǃýzЉ¾zG>Ïþï|‡J­Æÿâ/^¿žé©)òÍ&ÕÅEê¦I°³“†a048ˆÍfãñÇç{ßûš¦ñÎ÷½òä$'Ÿx‚ën¼‘‘={(œ;‡·T";>þ¼ìßåë7ò îDpµ¼ß‚ ËåH&“mF›ÝnŠÅ¢™ËåžsT_ë˜{6@¯X,¶É)²,s×]w199É»Þõ.¶oßþS…çt:ñz½d³YžñwB6›5‹Å"±X WÅùy*Í&u¯—ÉS§ˆX,Ø"‘_ø RÌ娎2äv“ªTÐ »Ó‰¥Ùäbk,ÖÓJ¶H&“läÌ㣪*óù<¶xœ` 08;1A3•bG0H¢Hµ^G­TX,—ùï~_©ÄX&ƆV»í-¿Õjr…‚YW>Ül¢ïµX8¢ëÌ9t‹Øæç±W*ˆ¦I½T"­i¬( Ç$à´ZYL§W9ìvFÖ­Cæe™§ï¼“áJ…‡¯¸‚7Ü~;<ò±h”áááÕ}y™Þp˜¹•Ê« töïßϾ}ûðù|¨š†?EîìDK$óyŽ<ø ýƒƒX,–Ñ ¢Qò™ Á ¥©)|»w“N§¹ï¾û8{ö,{öìáúë¯G-px½¬w¹xìèQº»»q-,ìïÇR«¡½ÓïÅÿÍet]7K¥étšb±hú|¾v,ÍóÑWŸÏ‘pôèQ8@GGýèG‰D"Ï)DI’„/|á mx@{ÑñûI$T*¬N'Âð0Ñ;piÕ Àj¥V(àx¤Ï³ŸñcÇ0S)”R‰T©ÄÀæÍXE‘‡¦®i\³mr¥BYW=Í¢HÉ0pZ,¼^A  QL$Ð&&P¢QÎær6IA Y¯ã0 r/ò‡¥|xq‘ã'O2Ôj®Ñ[œ-¯ú—Xõì×U%`š„ÊeÙ,.M#¯ëTææ¨kN‡ƒ¿µÙT•—ÅbÌML.—yã¾}„$ ¿ÃÁ.^äºD‚?>úôÓüðÀæxÛ[ßÊàà SSSTM“t2Éâä$¥XŒ‡ÑÝÝÍÐÐP;˜A²Zq J$¨ÎÌà,•Dl4Pææ8qþ<9]§h* ‚ÅÂéS§xêÔ)lV+oxÃèïï§V«Q“e†A÷ð0é3g(ÏÏ4M&''ñìÚµêÙ~ñúÍx³Ù4k• BKè"Y,švÛáp´ç“áp˜p8Lr­ó33Í4MãÈ‘#üà?àꫯæo|ãê¼úBÀsžñ­V«P*—Í©³gqf³8,l~?Ѿ>fVV(çr$ * ‹EÓ³šœòüxŸR‰Âì,ÕTŠz2IÍb!(Š…™|žÁ®.¦s9¬~?EYÆ Ôey5I´TB¨V±·xA d·SYYŸ»ÇÃp,Æd¡€d³155EPšªrðþûéŠÇ‘$iÍMgž<}íØ1¶ß±Z©iôB*…?›%%˨¥¿Ÿ°ÓÉæŽV4 ‡ P)— ;\±y33çÎ1Ólbóz){<ëìDI§9f±0wÏ=ÜrË-Dc±ÕÙu©D ¯¹|žº,3ûÔS”…^ùJTUmK]- þÎNªñ8™§Ÿ¦;`0%9:ÊÊÓO3vñ"‘—¼5åÉs瘙¡`µrÕ7²uëVü~?N§sutºq#µr™z­Fÿŋ̞?ÏŽ}û˜ŸŸGÏç)®¬üÒLþ ü}¯…ÌÍaʺuퟵ]YÓ43—Ë‘H$ð¸Ý˜ªJ2‘xÁ›’ÉdÌüÇ$—Ëqûí·ðû™Ä> 277÷œ÷çp8ÈÏÌб²BY)ÕjlŽFéܵ‹ôÂ’„ÕåbqqŸÏG¡P0×fáí=Ó4gÎCJ¥ VC²Z Y­|ïÞ{YL§éˆÅعc£­¨ž Pª×qýÁ g²YJõ:Ã.QÄ3çrQYY¡;ÃÒŠc.f³|wÿ~N,.ò$ð]`ÖbAE–WVЛMþä}ïã•7ÝD½R!=?lm69)I¨’„Ïïçb:‘N3¨ªü…apºPà˜Å‹…|µJ3fv~žM.ÕB¨Í†5eºT¢cÛ6†ÿ÷9ú4CÉ$öóç?z€?zÏ{ðx<ØœN ¹éL†ÅÉI¶¾â-gßš* V£•s¦I¢^ÇgµRYZb±R!¦i\¹{7âÖ­~â æOžä%ë×Ó=2‚»«‹H$²æó¦X,’H$¨×ëT’IzÝnyüqúúè²Ûés:)//£=«iúbÿúFd(Ù,Ñè`y|‹Û¢(æ$bí;J1ñØcL“‚ `µÙÐ4­LagÏžåŸøƒƒƒ¼÷½ïÅç÷“m)мn7•R‰&šŸ7^/Þ•#súôéç<ߺNÔë]U+é:’Û½ª^2 ÝÝÔjµö\½ÙlR­V©T*x<žö˜­©iT—–Z†ˆR³É©ÉI6ž8Áë:ÿ¤ª,çóXªU²õ:5M£–N£u«•T©Do½¾Š'²X_r é4ŠÍFªZåœxê)Ò¹›×­Ã‰œe$§Y©*©—ËTfg¹óÎ;y¥ ðiÓä»À$‰õV+ÁxœKK¨-ÞfÓä¦É“ ²ÕŠØh LMái45 »i²a×.vìÝËwâÞï~—£Gްc÷nÌd’7lÛ†b2…UࣷQ …Bí>‘HP,Ûè¨H$‚ªª$ òù¼Y*h‹è­HÛÀºu”“IbºÎ`V*”T•Íë×3;5EZ’¨* 
‘`¥R¡êt’.•¨T*H’D<'17‡¤(LÌÌ™ŸçñÇ1šM®¼ürÖ¿ô¥l»ñFÆÆÇyøK_⺙žE¦ÉŸÿKVCý€èjG½Î™3XN"¢ˆ¦ªþ˜Eæ4 snO©Ä†¾>±PˆíW_ávóÐä$+ðo¹ßÊå¸ki‰§“ÝÑ(¯Þ²…Æúõ|ó[ßâáG¡–Ïq¹¨7›0?OS×)8ˆ­ ÇC*•¢¡itlÛFflŒ¥T ¥ÙäðØã™ ×Üt»wïÆ!Tçæh* Õ|£…Ä6MI’°Ùíh²Œ¤(èn7놇9râ=ÃÃÄœN¼ñ8Jk‚òâõk.pQMÓ̪χ[ð¬áÛÑ5#¿,ËÈÕ*•f·Å‚»«‹s‹‹Üý©Oak6ilØ@ÿúõ|ü㧯¯ï9”˼ÃA£X¤`qA–YX\$e±°²²bV«U4MkÊÚ »Z ³^Ç 9ô  ë:¹\‡Ãßï' ‘L&Éçó¨ªÊôô4‘H×KµZevt”êø8B6K½ÙDj4¸ld„§Rü$“! ±ÅáÀ$êv;¨*’DµXD6 tº=—·Z­ÂĹsæÄÅ‹”Ob®Z¥§¿Ÿ]{öйq#yÃàܹsÌÍÏSX^fP ¢¦Éý.o‘$NW«„­V¬²LH×1«irCëFØjš¼?—ã>A îõÒt»S* NŸFóx8W*Ñ_¯Ó Ä[ÔØÝ×^Ë™ãÇ™Íç¹thˆ¾–ÛëСCÍ&—[,„ΞeÜëåR@ŒÇñŽŒÇ <ùä“ÂaŽMOã0Mú ;tר– …r¥‚1=M·ÇƒÜhдZ ´`Ã0¨V*«¸.Ã`na-¢6;KÓãÁ Qœ›Ãûß@#þß¶É&Š"î–iÿ…R(Öô½¯—b.‡UUyâk_ãžx‚8ðו ïøÌgèëí}Þ.¹a¦ÙÝÞl’Œ\Žîž,Ñ(¹\ŽB¡@©Tbvv–d2iJ’„Ýá °¸HPU {½\È癞¦§«‹` €ÒhJ¥p:D£Q²,ÇÉd2¨ªŠ®ªJ¡‹4k5²Š‚´¼Ì¥—^J, ¢(ÌMOsqj ÛÖ­«,ðBÕj%¯ë,ªÅ‚E’Ún€ ss8Nžä³Å"ÿœÛ¶ ׿Í<9=M±TZÂh—‹×T«ôj±ð6Ÿã²Œ;eÌf£hš\¾q#aU%æp ;œ:{– 8€›M“MÓäÔð0ƒ{÷²gëV~rÏ=l¿öZš¡l–I¯—?}ê)žÌfqY­x 6nßNAUyèÌæ¿ó®]ZÂ&<êpðæb‘·(›ž&¬È2îÖúàÌH.ǵå29|áèQ^U©ð÷À]ssÜ}ï½h7ÝD,"â÷‹Å¨;xzzðúýí‘éÚŸF£Arf†B2Igg'Ç''qlØ@´¯õyLM/ø¯áRUÕLNNÒ,‘V9ÛæZ(àš3+›Í¶ÕNN¯—F©„Ýã!˜@¬¯¯íþyc8MÓ̆¢`³ÛÛ9Þ…BŸÏG6›¥k5邆ª’-•X^ZÂ-I”›MRGRs¹p÷÷Ó³};@ mhp¹\m,óÀÀÅR‰•éiÌR Zÿ?âõR7MÆÇÇQ-p8èêéaê±ÇX6 ú{{±U*E‘‚ǃÚlR/À4)–Ë”*Æ ¡Ôë4·ËÅá‹ Š"놆uIUå Xý­EøÇ’Ä– xùÍ7ÓÛÝÍôÁƒææ8öàƒ„—–0.½ýª«ˆLOsÝà Ú±c,.-±îå/'40€øä“ü¿Í&'|>žv»ñÔj¤=6ÅbÀ±“'™XX —Ëaïj6ù_€"Ëd¶ogn~žïåóœ–OžäAQ¤pÉ4 Ølt ±Åã¡»»¿ßÿSæ¹X$*Ëlîê¢yÍ5|ûûßçõ;v°~d„bë{ôâõk.ðj±ˆ˜HÐuf—–(©*Þ–àÅb±´µä­ ªöº»«‹ÿù—É7ü~î:r„}èC„~Žÿ™ã¯gïð÷wg꺎«e0t±\¦Q«Qt¹°G"8J% …D‚q‡§Ë…$I‚@:nï…BÇCnj 1—ƒrÑí¦³³Õëeni ¯ÇC¢^ÇëtrÍK^±#G0UŸ$‘o4V‹Ô0¨+ Mcp~žO¾ã<<=M¡TÂ&Šü»a0tx½¼lhˆÎ`[o/Y§“üø8%Eáo  >ª(˜šâ^«Óf#ÐÓƒÓëE¦ÆÇQóyLÓd`çN?~œ¿n4¸x“ÕÊþ={˜™šâ728<Ìâä$‚®óøyv–wë:zì1¾ÕÓÃW_MP×éݽ›ñ±1N<òU«• ‚Àg-ÎK·¼ýíŒ?ÏçNždC(„Óf£êp°®£ƒõÛ¶191Á«^ýjÆþå_øÛñq ‡é_·Ž'R)–××ëüÄåâõ¯|%ή.Ré4KKKzôQ*­ÄÖP °884„ˆU«X¼^¶^r ?:uН>Ì>›—ÞtÖwïßÀ3¸$Ñ AÀåóîèÀ ¶ e-.¶R© i.—‹H$‚Õj%‹ årÙü›O~’®îî_ù½ƒAdY&‰𦙓££dž|’°,“Ó4j;wâ÷û©M¯—χ®ëT*dYFmÁÿÖÞo#•"Z*án6WêV+çffP].ÂÁ ‡uad¥‚»eõ<5;Ëö”-W¯S+Y×lòR@|üqN ðÊW¿šÊ‰pú4f0ˆ·£ƒ®áa.Ý´ Õë¥kæúúx`e…Ë.» ïñãÜ ªAMíi|bzšW¹\8¬V¦U¹V[í8—Ë” ƒ0t4›LŒŽ"7›LNMñƒ{îaqq‘§WV˜)9l±àÕuŽh=]]\~饔žzгSSL-/ãE”VÞØçÑT•Áýû±oÜ·áÎN*ŠBìòËÙ}à rûíŒó™Ï~–`³ÉÇ*ðO=E¹³“—^që4n Çé$´a;º»½ë.bñ8™t‹ÅÂl­Æð¦MôõôðøÑ£sñ"Ë›6ñG—_¾ªËÐuÓ0 ’ÓÓ‰¦ÝN`däÅü?{3*““èJ €+Áa·‰DVÃâ~ùà† xôÑG©T*¦×ë~Vqç“Ij©’ËEy5mEQE‘ŽŽt]' âr¹„b.g®ïîÆ¨T¨Õj4 sMiõLv—®ëX­VÜ-d’$I(]],OMÌfiŒSXZÂèè@h6©T«èÍ&¥zÓbA­Vqª$áäÜô4ï(ø„iò)àÖ`-…ÅEÜwòÕWS­VÄ/ŠôÚl„::xÓ-·pðÛßfúÜ9Rùñ –r9ö …XZZ2‰FƒÜè(CN'PÍfÛêÊ ü—S±™+££„tš,SÑ4"6üÒq2Ñh]×) ?óuJ½Ž<3ƒ¿Ñ`qr’\¹Lçà ápø™14m±‹d³‘,•0“IšÁà*änµÑÀ4Í6Tq-±äù0?Éåes4—ÃÓhàt:qù|«’V*†²Œ7¢P*a( ² €ËE `&Ÿç< {<¼íÿo.Ûµ‹¿üèGÙ¿?ét»ÝÎn¹…Ýh‰§ÆÆxêøqfK%ºvïæÖ«®âüùó|ãßÀn·300@OOÙTН{w=K0bë¶mì½ñFÎ=ôÿ³Vc¸µsŸFZb˜/ w»\ȵó'NzE‘¸¦ñà6à=€·ZåÓgÎP.—ImÜÈàºu ¸Ý ¼˜Ô4 Qdzv–M7òÀÐÙÙÉÞ½{q¹\9r„]»v_¿žZ+ŽÙ iM]Çoš¨¹4«÷¤U—‚ Ð54„ÒÑ  z½H’ÔÎF_kê†Å"j>Opú|ü®÷¯µÀAÀî÷SÊf].Â- á/û¼ã÷û‰D"LMMýÜך-õœ×ëÅ}†»,ŸÏ›@»À+…òÂb:M¦RÁ³y3¯×ZîµaÐh dY^›ƒ¯btE‘F½Îü“Oâ+—‘s9JµKºNÓí&ØB:UªUòµ UE·Ù0E‘x«‰èå!ÓdÙjåúp˜Án¸áŽ=ÊOôÐCìܹQÛÚsW0ˆÇnlj 4›/h~ñúæÅ7Õ××·êÔzij‹Ü*IÂóDQ@›ìŒDXÿÊW½ùf6½â•ÑQäóçY™˜ T*‘ËåH§Ód³YEÁápÐÑÑÁÐÐëÖ­£oËœƒƒô¯[G‡ÏGÀf£Ùl2;:ŠËj%—HP©V©»ÝtŒŒ°ûúë©V*ø}>>òÙÏrÇÈK¯½–»¾ò†éõz…m##¼·Ze¿a0\«± I|©£ƒ? …Øøîwó‰}Œžžzè!Æ/^äk_ý*YF¬f™% ~pÏ=ªUŽ ÑH¬VL»¸ÍÆnQä{ÀíÀU¦É}ªÊŸ«*¶ÁA.Ù·ho/ßݰôôðt½Îï¿]×¹á†ÇãœîíåËÀQäÒW½Š§®¸‚3é|žÚÝw³_UyçÊ ããÄ;:ð ñ£S§8ŸNSt:ùàÏt“0?;‹ªªæÚóòM7Ý„a<þøãø|>! 
ñŽwíÂÖßOÝçËñbqÿ–x Àét²¼¼ü+‹]ÖÀ¢( PHèìëœN'µ\¿aàP ss4Z³H$ÒΪZ¾Ôj5›÷ì!¼mv¯—° ÐÈåè:^I¢žÉ:}>6m¢Ïln7Á`põç¹\B,Þÿþ÷3==Í·¾õ-Ò™Œi4¬Ö·ºÜ—‹?øìg¹áŽ;õö®B#«U\ Wô÷sE4Êú øsài"ib¡À»L“O#~?=7âr:yã­·²Ój壆Á»[Ýô:«rWH§ÓȲÌå—_Î-o~3îînœN''NœàsŸûñXŒ®®.úÞùN¾¶};©¾>Þõö· hÔëØl6ªºN(ë:SÓÓLOOó_ûÿqÏ= ƒD†‡¹KùWI"°<6ÆJ"A"‘ N›·Ür ÷ÝwËËË&«Çt!ÒÛKpûv"Û·ˆD^¬Öÿ¿è¿®Ëáp_þò—Íñññ_¹ÀWVVžwçO,,˜³ËËŠ‚{óf‚¡¸Ùl®Š"ZÇügr¶ssf*Ÿ§¡ë$ÇÇI,,à°Ù ÑÀ‹á@b`Ǻº»Û-º®›kNº7½éMüó?ÿ3;vî$ÒÛË?ØíÜmdp”Ëüã?ü/}éKyôÑG9zô(“gΰ- Ç4QÊe¿Ÿ’ßÏb½N¿ÓÉ®2å2“I¬×o4X˜˜ »»›Û¶qèÛßæK¢HVè\¿ž¢ÇÃÕ'N »Ýä³Y²óót_q•J…·½õ­üå_þ%—\r ‡æâÅ‹Äb1j²Œ'$ÖÕ…ÏçcaaX,†Ïíf¢¯Oõõ‘Õu&|ïíßK¹íškØ5<Ìb2ÉÓ==øK%®‡©ÏÌà¾ì2\õzR©D4¥··—¯ýë”ËeÓáp ^¿ÿÅ]û·­À9wîܯô3:;;9zôèsžå Ó4“33x=¦I1Æ‘Lât:jöý|€`ö±qÛm·1??OµZmhr9¾ýío“ËåÚ)4ç>e—‹ñà²Ë.#¾¸ÈÄSOa‹Å8œÉP’åLÃ4ñË2Õtšpg'n·»-PzÿûßÏg?ûY{ì1NoŠUR©”ép8$©}O Ã0UEAi6ÞÄf†˜å%üÿœã‘G¡X,š/O%ù]G(¢Z­R{…ˆS× ú|˜VQ¤«« çïÐmu8‚,˦3aiï^Üóó˜å2ª$Ñ¿q#cgž‰/ V«5²ÂeQ 6 üT 2ÓÓìv‚?Ì›k5þàpµÊ̶mtŽŽbEªñ8ýºŽenŽ®ÁAÖvu1Q«QN§!$`³‘VÔ`cÅ"%AÀîpàöù(‹Ô a0µw/Ý’D¸ö@Îçq»\ô÷÷SK&¹¸xÊá`ÙEñóŸÿ¼á¨››ãÁÛoÇ Ìœ8AUÓ¤V«qÆg°cÇ2™ 6l #fff† š¼<¿ßÏô‰ŸžfûÞ½,Ÿšâ:ÓäFIb—ßÏÅ‚ºf þ~*•ŠY¯×ŽŽ®¸â î»ï>¶nÝŠ£˜ÍfÛÉD"a&c1 ““”fgÁf#¼|9ÑU«Þ¨æÿIÞÑÜse2™×üÎæ÷ËÉ.A*¥’™©V1…Žþ~$Iúï¥*ŠY.¬V¬V++V ùý,ÍÎâEÂÑ(žæ‘ÕÜÜ\[ '6qKKKär9¦Ÿyzš‡žx‚‹k5Þ ì°ÙxÖéäÝ_Ìõ×_Ïž'žàί%Ÿ§jµ²+•âÐ#´|§Ÿw÷íÜÉ‘XŒ÷^}5»ž{Ž™xœÊÔõŽ⊂lÌærT5 ÕbAv8ÈïÝ˱ùyÖžqY­ŒnÙ­{öð‹j•ܺu|ûÆùÏÿüO®¿þz>`š\·k7#ÀÃV+ûöï'à÷ó­o}‹d<ÎØÊ•œþùLOOsÑE±yófªÕ*©tšr,ÆÚ¡!ªÅ"©)º/4h²²La÷nÆ}>º—-k+ÓFGGñz½Üu×]\yå•íÀGhäÖåóyÓÓX§§ñçr8‚AlÁ åß"Œz£Àÿ› ¿ß/Üpà æïªKÿ5{ùv诿ÛM÷Úµ ÐËL//º®›‰ÉIä©)2ñ8ZGÎÞ^ì’D¤¯MUYL&)Œ7"jm¶vh]k&ÏçóÌÏÏÛ³K"ªª<>6ƾÅE„‹/æ-v;Ç#sìÄbHÉ$•|žK9Ãà©hµ^g>‘ »{7Kù<¢ËEϲeŒU«<Þd®w”ËU³™6išlX¿žÌì,¿|à†vÇb07GÕ0Xr¹8ýoÿ–øîÝüÛí·ó®+¯äÎï}ïž=tÐð‘GM“„ÅB)—ã¢\Ž+GËeŽŸ8ÁyçžKwww˜át:ÑD]Qt8x¸¿Ÿg²Y¬\ Šˆ6!·'ÐbµX^¢@»öÚk¹á†ÈårœrÊ)‚¦iíŽ;@g0Èl>Ov~Á0°¾×ˆKÖtݬW«ÀÂסåVà†a˜õZ C×q4|׿õB255õ{Íà~¿ŸÔI!'7Ú~×÷ÑuJ,FõÈŠÄD늇‡QUµs,IíU@+âG–eJ¥-Šl.—C èŒD¸òmocåÖ­üðî»ñ{Úì,wÓÓü³ßϹ§ŸÎØð07ß|3±xO­F¡y¤ö Ð!ÄÝnDEÁ£iTh˜Z<ù<½}} ”Õô4RºÕj¥˜JáH$ˆ"ÆÆ8’JqÁÚµX²Yfææ(G£Œ6=òZ3ä±Ûìììä¼óÎãß_>ñ„9=3CG8܃ÔÅn§wÕ*E¬ÍÕÓ«ºa˜©ÙY”ùy «•ÀŠoÌà¯us9 'N`QUl½½¿»zÅŠüô§?E–eóµ½É²L*™äþ{î!™J™‘ˆðjJåb‘L"ÁüÂÕãÇ)%TÝn,N'¾¦yâdjMK× éK§ÓT«ÕvãnÕêÕ\z饌-_ŽÍfŽ=j~ëÖ[9{d„A`÷£²hµ2íóQ¯Õøœª²(Š,•JLNL06:ÊÕ×\ÃW¿úUžØ¾\“–âp8Àïg_¡ÀYÍåp?pƒ¢0þüóœ XÏÙl úýør9ü@·ÍF ™¤sÙ26®[Çâü<å©)®£A—y\¨nÜÈ›V®$}÷Ýüð °Ä$ _"†Ïç£É’Çëõb ‡1âqPU‹‹†G×ñ•ZZ,Æñ#GКץEÝmAº{{9vß}Øÿõ_É­[Ç57ÝÄòÑÑFSMñG"xªU,n7Òk<#Wd™ìä$ÁZºª2gd²YSÅVÓKS:ûë"† ÓÄòk~þGSà†a˜KÇã—e Y&37G¨ÙeþM£¯9;üÆ Â_÷;MÓüî­·â¾ãÜŠÂ7óyªµšéz²Åb6Krï^Šñ8±‰ |á0µl–h €èrQ]\DmúÚ5Mkƒ‹Å"Åb‘z½ŽÇãÁëõR«Õèêêâ‚ . ç$›ªßëEYXàÈÄeàÎýûéèïçí×]‡?à¾`brÕÊyçžËää$Ï>ôçÇbT.Ùµ‹,Äæñ(¸¸Ï墫^瓆Á½À À œvþùxƒAþêÇ?&“ͲR¨ŒSH§Yëpp(üƒÝNYUɇBôÌÎϳ^×¹°YÜ÷®¦Á£•¼*IRCË_«¡‹ø¬VìªJ‡ÕJ´¿ŸªÓI"“!h·ÓÛÕ…ßëÅïóaµÙЛxç–Idbr’s––øT¥ÂÍ»wsèÀÖ­[×v“¥’‹‹XÑ߀÷z¥Ï¢¢(”Ëe‰u‹…š `88ƒÁ´Vk#ÅZRåL&cŠÍÂo%ʤæç‘³Yáð[ܤÀ-‹ZX0Ëñx#lð‹¿õ¢øý~¼^/óóó¯úwʊ‰]»¸ªR¡øÌΛÞèße¨ªjŽ8™ Q·›¸Ó‰ÏéD ®(øËeLA °¸ˆ+¥R.·ùßõzWÓE¦¨*õZ¡¡!¶lÙB04M3«Õ*ñxœ‡î¾mr’±ÕR‰†ÁîÙYÆOœàÿ|îsH’Ä×>ó, ÃÃÃlØ´‰üO~Â?( ßþ È?žE.–€ÚòåŒ'|4›å00«ëØ6—J\ñö·cÆb<ô‹_¯Õxàþû tu‘ŒÇ)ÅbD»ºÈŽŒ0èWärävîäYfÖ4±S4,§.Ea@Óðz½x›Å®( ‚Å‚&Ë貌\¯£”ËŒƒ¨4Ü\ƒ.ªªâíí¥§¯ï%ÐV‘Ÿ²v-OG£|jЉžÞ60@©ñ™!ŸÉ?xn‹…j&Clfæ7Êš¡}¨ÕjÄãqTUÅápFéŠF)g³Xl6|ápêÙ’,kšÖ^™•J%4MCÊÅ"ê‰ x½”‹E*Ï÷=ØÕEQ‘…ˆ×K¹y6\©TÌ_ç:“$IøÎw¾óš/NI~xçæ÷y©ZeÆã¡|R„ð+õTEÁévS«ÕˆÅb8|>Ìž]§cxOµŠ¬(,?N§$a3 ²É$šÓ‰ÐÜ«Ûl6"‘H#Ú'—£Z(00<ÌÖ­[E‘¥¥%óСCŒ³÷Å)½ø"C’ĉZ+ |Ÿ™a)çÈSOñéX ;pßw¿Ë©—]Æ Aà`Rù¼  HÃccˆðmEa¸faŒÍÆÓ}}X ƒÅ‚nì}þyž²Xèw:ÑëÀƒ££<ÿ K&\¿žçççîéaàØ1¾`|IÊM ÞÑÒ!mšPË{P­T0±ñüý™Vá†Ýb¡Ð\ÆsÒ)ÀmÛl6Aoæk[,Üår™t:Óél'‡¼üu£££<÷üó¯)oêŠw¿›¡Õ«)—ˬ™Ÿç›ßü&333æ²eË^ò>åBܱcå2y»ÐÐ8Nê==(²Œv–Ê3Ï0M:=’µY¦kíZlV+õz`0ˆÕjEQ ¹ÊÜ¡r£Ze¿i"†Ã,--1==M:“Á^©pɺu¸e™Ç9V«ñ¢i2K@‘Éd(,.r*ànzæ&æç)Úí|÷» z$ÂÔ‰üë׾Ʒ>ö1ö=öóÀb&ƒnµ"d³øûûù³w½‹¥¥%Ù¶;w2µÙØcQ¬õ:›·nå˜,³¾\fìÞ{y¼^çó€Ü|_Ñßõ. 
Å"[Î6SïĉìÞ½›+V°víÚÖá@WUÌR ‹¦Q®Tð{<Ä ƒ\,FG4ŠknŽ%QdlëVìMd–Ð0±mÛ66mÚÄß|ô£ÐÄ4µºìN— oo/é‰ TEÁL¥ÈLOiŸzÔëõV"-¢( …p8´rò*å2©ýûq+ — á·°ÜN.|MÓLÖ¬!—É …ÃxšpÑ?Ú¯U«æÒÑ£˜Š‚·¯8ŒßïE1³Ù,±XŒb±hz<ž—\ÈZ¥Âãwß‹†ª)ò*e'7æt]7].7Þx#æèèhûg•lO½ºNUU ‡B¸Ýn¡õÔ®æó(‹‹ÈªŠjµ¢è:^§Q(d³ôwt …Z”Jå2ùlcq_©„È)ß— IDATs<“¡ÒÙIU–qº\l9ã õ:=ºÎâÌ ý§žJÌéäÑÎNÆ*î}ànøÂ°˜&_$J1W,rö¡Cl°Ûñ]~9º(2:2‹Ï?OÐïgà ùËÇÃL›ª*oÓu>‹qøèQÞ|ñÅtE£üâ‡?$›É`±X(V«Ì;ƲžÂ¼¸c_¯ófX§[€oÑÀ<®Õ8’J1²lIbåÊ•D£QÈd2;vŒG}”M§žJO4м´„]–‰ Ý.]~?§“ñtJ%‚Å"KããÌG£"‘vXa.—ã©§žâª«®¢X(´—í­Ff¥R!çó‘õù Œxœ«•µË—ó`*ÅL>OlxG*ÅS33l9óL"‘[¶lá…^  244D*•âÀÁƒìÞ±ƒs="n7šÕŠ¥§‡²®ã«Õ8od„™T ¢Qz¢Qœ¡Þ&ÕÖ4Mž~úimð×j5jµ²,7ˆ/n7ƒ«V!¹Ý$S)tM£¶´Äs=FßêÕxÜnDQD–edY~IÊi Ým³Ûñz<¤…\¹Œá÷xÍ:à„»í6ƒ·)šªReb±N§“\.g¶œ\===äóyâñ8…BÁÌäó˜É$k€‚¦qìe™ß¿Ãùµ™^\¤–H`óùèämo{7Þx#»wï6GGG©Ë2õÎNòÕ*®f`‚ mFº­™‚*º\¸,Jù<¢ª¢f³¤âq{z°Ûíx¼^J©Y&== ©…z%»®9÷Íofdd¹Z¥87‡ÃjeQ×)Ù파±óùç‰/-ñä“O²qãF.»ì2¾ñopÇí·#Åãœd€i¿Ÿ÷žsGŽajz£VãG·ÜÂñxœuëÖqâøqR©ß ‡yÎëe¯¢ÐP.—éîîÆárñnMãó†Á—…/Y,D€ßÏúÕ«Ùÿ ôÕët =‘µzD$Â6`±Z¹uÇŽ9š5kزe «V­brrUUñûý °pô(³?ÎD:MI×ñöô+—1óy:tÀúõˆ7ìí%Ô×׿ÛÇãqs||œ}èC¸ÝnÊå2n·»6ÓÚ'WÊeæ äD‚• Jg'Î|Ã0°Z­H’„ÓéD|é‘ׯ¤¢(f½ZEr:±‹âç௩‹.B½V3Óš†!Ë,ïéÁéõ¶;έQÛ®b¼êÚkù—Ûn#fš,«V) ¦ÿFFsßj‚†A*‘ sð ¦Ifv–\¹Œ?aÓ¦MT«Un¹å®ºê*N=õTÜ>¿0ˆÝno½'²,“D8žÉr:ñ—JˆºŽÞ¤ÃHÍžÃïGVU„Liq‘ìî݈ù<8„W­bË©§Òéó5–Åå2G––™&ŠÓIWO‘ÎNN'_¿åÞöÖ·ò¡~‡Ã |þsŸÃ¦i|*!Q©PPrGÖu^Ø»—·d2|`~ž/ »ŠE¥=@Ïš5üÅ¿HpÛ6æææ(•JÊ’DçÊ•ŒlÙÒ@g7Wq¦irçw¶³ã- ÝÝÝ/Q¶>3KóóTçôÞ^J• ™ÑQÏ?¬Ö6Z¹T*µ Þn·“ÏçÍ–iÅjµbÅLMU%éuÉjûƒÍà§SÐ›Í á¤†ÅɨãE¥EôP…³Þô&–/[†Ï4¹Çn¾ùfr¹œ)IR#I×1tD"¦ë˜Mµ“ÅbA©×ß³Z±K®PˆŽh‹ på•WÒÕÕÅÝwß ª¬òù…ÅŽ¼]]¨Í½^¹\FE6}6µtšÒþý¯T0c1Ê…‚®ãt:±Z­D;;©<ý4ÖDJ%„ÁAº‡†èìêb@ÓHîÞÍL$‚a·]·4 ©XÄç÷søðaÆ_xëÔåu똜šb~f5Ÿçš /äàöíx…°=ñÇ~ô#NÛºmjŠYfÐcšØóy.oªÐþuǾõ•¯ñxèÿñ1öY,ÌÅãÌOÙlèB¥BÕ0XeBÕ*3ÀçD‘ŠÓIhpÒâ"3ù<Ëü~VG"”…ÙùyÎ:ë¬FrLó6Y÷ -_ÎdµŠ?be?…L†üô4ÁI9<ñóŸs,æ­—_NµZ5“É$³³³lß¾}èCäóy’É$õz½}¬V«K§Í¥j¦Q¬T½^Çf³±|ùr4E!ã©×é–$kÖà‰F©--Qœš¢dšä­VúFGq:ÔêuÔ\ާŸ~šÿøÑ8óða>¥ë|ý¶Ûø÷J….}²Ìy¬>å¶ïÞ‰°µ¿ŸÔì,óN'æÉ6»ç ƒI‡L­Æ’¦Qlv¿Ÿ{àF\.þZQˆ%“T,¼>ý}}œuÎ9<üàƒ,V*̾\Ž4UpW‰"ónÜHŸÇC¶V#—JÑïñ0‘ÍR®TxêÁ9û¬³[¾¼]ØÉd»ÝŽnDü~¿Ÿy‡ƒ37oÆe·³íùç9qÿý¼·XäÅ»îbâÄ N;óL`ûöíƒADQ$›Í"IR»Sïp8Ú"«ÍF)›¥ÞÛKR–é\¹’¾•+Û{bÃ0Ì–1¥ÒLxiZ´Î·eY&;;‹GQð¹Ý$+•v§þ CUU3vô(Ötš¬Å‚glŒ|¡`jMigË/Ýê˜Z›®-»$aõzqúý¸úúøä…rÇwrÏ=÷ðWõWÔ ¤X «¦‘©T׬iŸqxC!Ü@{寧ªÙ:niÍôµl–ý뜾f Ý\€®ëøý~|>ßKN‡Ð²eTff—–(äóø,FFFp9‚¦ifäôÓ©„à {½„W­¢”JQ«V‰MO3^¯³lÅ äzZµŠ,Ë<ú裨ªÊûßÿ~¦'›J‘«×Yzâ FGGÁédÁ41€U«WsìÈâN'eQ¤°¸ˆ`µÒkµ·XèPTÃ`‡ÝÎ¥€KUVU&ŠEž\4a•Á ›N;k?üaÓä¶ï~—¬a Z­ìÐuTÀ¢ªtjËðW«”}>ÒÙ,‹ù<æü<ßVîîûéO¹î“Ÿl “DQdaaT2I-#¶¸ˆ¬(H.«7n$£ìÛÇŸÎR‰ÿع“þ‘ ]çèÑ£¼ç=ïiã[MËr¹|²d‹ÕÊܾ}X^|‘~I¢œH¼$l°5ièºnV«Uòù<¥R©M jÍÒ…lÖÌiUÅbknÍÞ(ð×0”z!Ÿ'hµRJ$H8D‡†°Z,´fîÖÞHÓ4ŠÅ"µZ _ @Ïe—á°ZqøC!¡P(˜7ß|3·Ýv—_tAA@Õ4²  !IÒKl†­ñJß3LŸ×ˆÓOçùgŸå”ÎN6^|1¯äAEïð0æž=ô Ñíp ôö¶M&6›MÐ4ͬ/[†`± * ñ‰ jSSäªUÜýýøý~ººº0 ƒï|ç;8>ñ‰O`7:ÄÇ==ü‰ÇCvnŽÉ|ÁëÅ éèÀ;4ÄäÂVÓD¯T0Ýn4EÁgØ‹ dNÓø‹™ÎþÉ4ù®×9”J‘»ï>þãá‡y´éóvÙld&'9 \l0M ææ”M›¨d³Ì ©\‡®ÓôY­ÜsäûàÜsÎA–eÜnw£É67‡R.£iAQDRU>Œ$Š,E"\¯ëÔ‡‡yóWéèàÉ'ŸdõêÕ¬[·®Ýén=”u]G×õöì«( É¥%³³X‚ArN'Š,ÿj—»©›hEge2ìv;µZÍlß³uë0 §Ëõºd¾ýÁ \”$L¿Ÿ|&ƒÔÕE×òå^ƹÖ4Í, ”Ëåö îîîne 'IX…\.gÞ|óÍüøÞ{¹âœsÜnú–/'28Ø.â“ÕE/ÿ·,ËT*jµ¢ÛÍú-[á>J×SOµ·/:¨ªj{.Æô4Ši\»«ÅÒˆ1–eò…Š,£ë:ÙX [.‡¡iˆk/ºˆ¡åË™žžæ[ßú¡Pˆ«¯¾Ý0˜8~œå>kÏ>O&éÀ³lSé4‰xœBáp`ãºk®aûö휲v-‘H„z½ŽÛí¦£³“úâ"ùL†¨ªât8ÀãaîùçÙ°bŽË.cí°jÅ &''9tè—_~9‹‹‹x½Þö=´Ùlˆ¢ˆÔ¤¶H’„aš„¢Q”h”šÇƒ¯§ñ7tÀ[3¶ªªf±XlMÕ*Z¹Œ+Æérñz°·‹¢ ×ëf¥XÄçpà:I»«ëºY.—YZZÂn·‡Éçóx<ž_)î¶ô52™ŒyÓM7ñÐÞ½¼ï}ïÃãõþZ»af+Ô ÒÜoI’D4¥¯¿Ÿò²eh²Ìµ§ÆíwÜÞ˜-LÝ0¬î¦gÜn· ÕrÙÌ…Ã8›ÛÝŽ;²4W#M±é¥%J²Ìòõë!¥{p]»vñïÿþïœþù¬ZµŠb±H©\fñða"‰Î|ž¹tš¸Ë…Çë¥Z*á‹Fq(Õ*óssT“I¬’DU’ˆÕëØ, ·XºRA(—€à@Énç'ªÊW ƒ[€¿6M~F#ñ$)œ³u+§oÞÌ÷ÞËy„¹ÛœÅÂYù<‹É$u]G´X XäÒK/eô ±ŒŒ ‡Ù¸q#?þñ¹îºë°4v‚AfMk©„?¡P­²}zšZ6Ë[Î8ƒªÏ‡ßéÄ0 öïßÏæÍ›9ãŒ3H&“är9zzzèììD„öRÀn·chN'ÅÞ^Ê>£gõ[Í× †a˜^¯—ùÙYr{ö°,¤–Ë!ºÝoøï;$‡£}rr3$‹aš&¡æòºÕ¨ùuÅÝápXH¥Ræ7ÞÈOö3ÞûÞ÷¾¢È¥V«‘H$P¥‘2Ò 
²³þÐÉ'ÌoÜz+…L†sÖ¬Á¼ƒƒ”J%³^¯“+Л¾g‹Å‚Ïçk3à àP(N§1ív†Ï;ÉbÁóÐCñÈ#ðîw¿›¾¾>æççñûýÔ*ˆÅ0 R™ 9ÓD2Mª¥NŸSUÑ…¢,cHF½ÎžL†„¢°øà?G·®SlªÐ®£‘>z‹,£"ðp° ø à[¹’Ó7ofÆ ¨ªÊ—æMCWU0M2é4S»wc±Ù˜/•èÉã¡{pj­ÆÊ•+Ù·o?ü0W\qÅbUQèt¹ðøýt2­iLW*\ú¦7á´Z‡Q ƒb©ÄñãÇùøÇ?ÎÈÈH;uaa™™:::èêêÂÕœau]gîøqôcÇè•$Š~?Áhôw¶k¶þ_.•2­Á »¹y¼úFÿžC×u3³´D5Æâvcia´2ÁLÓ$™Lb³ÙZñ¿õ¦E"!žH˜_úâ¹óöÛ©”J¦Ãå¢^¯·ùæ‹·ÛM(úà|cccÂÄÄ„ùù<ÓÓ¬_¿ž©\ޮիq6ÏéEQ| ˜Q×u³ÅroÍæþ@€éùy:;:xöž{ç#ùáp˜……êŠB)‘À’É0·¸H|~Ýá ”ÏSŒ…êMWI–1T¥™ î3 Ö—ÐÈþÞãv³& oµ’ŠÇ™PjN'ºjR0ÈçBM$¸ñÁ|ð»ßeÕòåLLLàq¹èðxð®XÁá#Gp¸\dëu4»™x¼Q  õ:cn7^ŸÉá@×u.¾øbî¹çV¯^ÍÚ)§£ƒ´ËŤÇÃŽñqþâ½ïÅS¯#ïÛ‡¨ëÈÏ;ÆÆ‰D" |>£££ŒŒŒ´)8m“ˆÃéD®×1ËeD› £™-÷j‡' ²lé|¾¡%÷ùÞ(ðßwTJ%êSSxêufs9›6Ñ?4„½ÙœJ¥RmøÞ«1–x].ÞwÑEÜóýïsg<Λ¯º ÉáÀáp‰DZ”‘WÕ@æýW_Í=ßøùj•óß÷>zzz~¥ÓZ…ÄãqAÀívS¯×,¾ñ÷ôÈ#õùð4„ûöí#›Í2;;K.—#?=Muv§,³¬VR@µVò• UMÃR¯S3 A $Š-$Eá!]g‰†ßÛ¢(€áaæ|>þ1ŸgÝŠ¼ÿÔS1Ãaž9í4Ž-.òÅmÛr88ý£eó¦Mˆ¢(Ü·Ï<±};ë].U•iIb,ŸÇ¦ižšbõнƒƒL¾ðóÇ3½oC›6áñz1 ‹Å† øÙÏ~F$¡§§‡Òð0)‡¥¹9Î}Ó›èq:©7=Ü%`)‘`×®]|úÿoÂáp›€S,ñz½ƒAB¡¥R‰¥¥%2™ .§½V#U*QºW®|Õ 4Ã0Ìb6‹^­"¸Ýbáÿ}G³$‰"á`o €Ýft]7S©¦iÒÙÙùª‹±V*Ñçõrõ»ÞÅ-?øÛ{{yÿ>ðšoša¦"Ëô­\ÉŸ~üãü஻О{Ž+‡†Èæó¦×ãÁn³ ²,›-!†Óé¤^¯sìØ1 …;wíBxà>_«ño…ÿû¸ïþû¡¹éŒFÑ‹E®9|˜s€Ï8$‚AܪJÉ0ÈÊ2ºib7 >ápÕá@SULà˜NcmvŽ·Ñà¦u˜&Ùd’J¥ÂP?¡+ðiÇöí#:<Ìš®.V_r wé:ç¿ó¼ãòËiy¨]’Dyq‘-ëÖñìø8Ýõ:·kp¢PÊdè_³†XG‹…ÛIEá}×^‹ÏëE×uN=õT~ô£ñøãó¦K.! 1·¸È–-[¸àÜsYzì1zT•œßÙßϾؼe ‘H„d2Ù†+˜¦I¾ai“bV¬XA¹Ráð /PÞ¿Ÿe¦I¾V£¢(ŠE2™Ìï<'“I’Ðmš˜¢HáuÚ`ûƒ¸Ëë¥68H&Fl¸®ëf:F×u:;;_SQº|>Rn7¢ påG>¶mã¿øÅk²˜ Ácǰ”Ë„ƒA>úÿÈM7ÝÄÓÛ¶á,{Ó›xæ™gÌVôoK?¿¸¸ØM°ÛÉ ˜’ĵ{:;¹ìòË‘U•ÅÅEr¹†aP*•°7÷ÆUUEÔ4äfäp·×K¨£ŸÝNÚbAÐ4„zñb›¢“eT«•ˆË…¤(Øt—͆ÍjEÊÉ$Ù|žSFGQ<æ––X!ŠT4¡H„…¹9N$X%‰”i%lš”¶oçœp‡ßOQUùþ/I·,³äpпz5eEáË_ú[·neóæÍttt°eË~øýï³ûÑGqvvâõz¹ê½ï¥´´„˜Ï#Øl8»ºÐH>÷¸â ¢Ñ¨ išÙÊh—›.EQÚ×P(„? ··B!".‚ÝŽÕåz ,â· A0 £­xš¯{½f†ÿA ¼EËÐûú0›;›Í¢ª*Ñh”Wòƒÿ.Ãárѹf r­ÆF¯—ÎU«¸ñƱÙlhºnÍ„ÊßµSnÚC+±“¹Òš5XL“Á_ü‚k ƒÏ<û,ÛÎ;žÎNB k)áf7]§Z«±‹1ßÕÅ\o/g¬\ÉÖ³ÎBkŠ6–––¨T*üìG?âçss¼$,¶tv"8ôvwS÷z©) ¾P-™Ä0MJé4nA _«ôxšä±Iª1MÅá@d«·ÝÎìü<…|ž¯—ØñãD‹DxæàAâñ¸ÙÕÕ%Ì-, õ÷Ó±y3g qçOÊ7#ŠÂ²@€’,sþÔŸ>-˼hš¼çâ‹Éçr9r„={öpúæÍ”K%ÆöìáÊb‘/þw¿ÍFr~žˆ×‹ÅbÁ9<ÌŽýûÙ°~=ÂÉGY­~†¦i´’`ŠÅ"333†‹aH‹Eá0ýáÕ¬Ð6¥…,á®®×mføÿ/ØäR:Miq‘’¢àêì¤`à5÷ÉÑ“Çää¤ù•¯~•§yQÓ¸àï@×uSdYn›*• étº= nþÖ·"Z(Òuâ»w3‹qAs†ÐuŸ×Ëå—_Îàà Åb‹ÅBai‰ÚÌ ÙtU¸ä‚ XÕÓƒf±M§q5E ­=j4á®#G0XÒçõ’v8­VlN'U«[¹ŒZ­RUeŠ©A— ¥ZÅêñ X­ø$ MUqHª$ …ÈV*tÑ‘2±»Ùäñ ‡êŠÂsÏ=×¾^dý† ØvíÚE@’xóæÍÉ$µ¹9¤±1yA–‰I‹‹‹,ÌÏÓÛÛË™gž‰(Šìܹ“í?Îeå2gÐH2ŒÇ™}øaÈf1ûû±F"TNœ8Á'>ñ‰W¼ŸÖ—QY:::PU•¹éi’é4Í#ÃN§¡@znîU%¶ÒQ<¡Ð+&Ó¾Qà¿ÇP…Òô4žR‰J>ØÑñ{÷¯###ÂW¿ð3ûéOó&àŽGá…}Á4)4½ÛÍ›×ëmë•ûûûY½z5…în\ †Á]']«qüÒKù—R‰åçŸ'áç?ÿ9‹…3Î8ƒ+V g³ˆ¥R&ƒ¿³“° àÏçYÌdp­^Íò h=`Aàü /$p÷Ýì¸ë.úææ¨ÌÎbZ,”* µj[G£ÃÃ=tˆ#ssôŽRJ§©É2=N'¢Ý~?ã‹‹DƒAÌJ« 048Hjb‚ùXŒÎN¼å2óóó*ÊeвÌ6Q¤.˦¦ª|æ3Ÿá/ÿò/¹ï¾ûH¦R¬ÚÉà®VÉ”ËtkãkÖpí‘#œrá…¬´Xxê©§¸øâ‹ÛNÀw¾ó,â¶¹9žM&™ ¹ÀfÃ69‰Çí&].³âœsxô±ÇX¿~=ýýý«yxW+S*±æóØŠE‹¥lñ*;é,1Gð¡FœO(„å¿Pÿk( cÀ)€”ÏãöxX¿vm#4ÏçkØ<›_­%¢ašfz~ž™ÙY &’I^L$xLJ?Ì¥oy uYÆn³Q¯Õazzš={ö°sçN"v;ëu§,#X­dA` ¿çÀ‡CÈçófË4a±XE–M}ËR¥¹tK½Ž(Ixj5–9¤­Vb ,ÅbD$]çX.ÇXw7>›’i’ÕuDQ$ìvS5 2Õ*n—‹e+VˆÇ™Îf¹hlŒ¤ÓÉ/÷ïç+ù<ðáGerj ÉnG×u^|ñE&''¹öÃæÁ;î@Û·¡ZÅì6çoÙÂÞÅE¬>œ~:Û¶mk‡KÔj5öíÛÇâƒgÅñ'0 j©j_ŽînzW®$U­219ùkgï߸s:‰¬YCµTbtl 5•"#„úú^׳ðÿ¨·‹"¾áarKKX].B]]ÿ%çí…BÇœ‡Î;óó¬þ_ÿ‹k?øAœ¿e¯¦©*©ÙYœ@UQHÎÍqñ9ç°õ´Ók5*Ù,šiR‘ezzzaóæÍ9r„gžx‚Ç÷îe…ÛÍ@$‚àóaïîÆîtèîhã…[3ˆM‘¢Qª¦‰]×L“…r½^ljPN¥˜8~§(â‡)—ËDÂaÄHÑn'X­RÎçép8z½„%‰éL«Å‚ßíÆÙÛËñéižÞ¿ŸÉnçéæßºP.³ý‰'èŠF9vì‚ ðÑ~‹ÕŠ?@µÙˆD£¤MUÑ@WýË–‘ÏçéììäèÑ£\|ñÅÔj5ž|òIöîÙû.½”@„Ïó“Ý»‰è:•£G™š›cõÖ­¥‰ÙY3Ô€e¯vÖ­×ëfR–14 MrÎ IDAT«ÍöF%ÿw)ðÖþÇ7\cÿ——JõzÝŒÇãd³Y6mÚÄy?ý)uE¡#úÅm†Y©TH$“Xö$“¼0?ÏEçŸO¨·—ÔÌ ^·k"A²Xijz5EQ$‰7200Àá@ÛÌ ÇŽeÎíÆÿž÷°rp‡Ã!hšfÆb1|'‹*L¿ßO´¿"‹¡é:RG…J…ìì,Q¿ŸåƒƒŒ'“dÊeVô÷SE2• Q‡ƒ¡¾>fãqÒµC>}¦É‰D§(ÒåvsÞºuLMLI$8ghˆ«Vaåm>÷Ý{/ápI’ø›¿ù¢Ñ(‡ÆÕÑA¶»)E …8n·òù¨‹"—½ímÜÿýH’Ä¡C‡XµjUÃE¶¸È©6à­V‚šÆÚõëIwuñ‹½{yï¾}¼ø~.GlíZ|Á åj•|¡`¾š%¶ Ì=Š‹ás:Éjª¦™ö×éYöÿ¸&ÛÅþÇ0 
³T*‘H$Ðu¯×Kww7¿-¥ÂO$hš†Ïçã…]»Ø93Û¯¹†¡B£TbbbkO.W#n§é-nY[5MÃåv3|æ™,;ï<4A`O"ÁwÞ‰ßçcÇŽf«g?y["˜’„ !G"È…ºN!Ÿg2C …Y¶ »ËEEש9töö¢V«dj5*Å"®@€¢ÛÃb¡cp_¹L&¦ÃáÀEêÕ*~?³±ù|žóßùNFþìÏåoþú¯yðÁùÞ÷¾ÇÐÐФÛâŠDˆ á?ûl–e2L?û,Z±ˆ(I8Þþö·óµ¯}X"ÁÁÇ9rø0^poý“?áÉïŸ\.GÕ0è!<6F×ü<À0PYXàÙíÛæët7·=-7áI[˜ö×KVh†A*ßÍâïîÆlBÞÿM üÿöPUÕL&“m }½^§³³ó··¦if.—£\.ãñxƒüä'?a÷îÝ\ÿÉOâEf·mÃZ,â¯Õ{[§5Ày’Ç\„ ÂfcpÍ:›€‚S|>J¥Ï?ÿ<=ôwß}7§vo}ë[ÛnµR±ˆ"„7oæéXŒÉ™ì• Z<ŽÛëeùé§SŽÅ¨--‘^\dÝi§ájÅ".¢ËEÁá GQÈf2”––°ÚlƒAÓiœ¡‡ƒ²¦±|hˆL­ÆC»w©×‘ÜnæææèêêâàÙgŸ5ív»0>>nº=p¹XuÆXNœÀܹ“X,Ö¾N‡sÏ=—cÿüÏì§24„h±0~èCýýd»»Ù?;Ë>‹…é½{™+—ùÁi§aÄãô,_ŽÇf#/Šôûý蚆$Šm~ZëĪÅh£µ¡‰¥tš°aP«Õ˜ÌçYö2ÀãuRà-k,ÃápàóùÈçó-¦ù¯HJYn|˜œNjµKKKØl6¢Ñ(†að½ï}ééi®¿þzzzz„z½nZ;;2ÌB»ª" òz±6¡-œo+Î&ÒÙÙù+–jµjîØ±ƒgŸ}–Ï~ö³lذ˜ãããø|>›x>ü쳄«Uv¼}åJ ìv“O&v:Y‰P’š†O°˜&ºÕJŸË…³X¤ÓçC³XPB!ŽNL°1Å^«ÑY­ÒßÝÅçã‰'ŸäŒ_þ’y`>eä”S8ï¼ó¸ë®»¸ï¾ûL—Ë…Ëå¢X(`µZñ5\z$ :::0 ƒh4*|å3Ÿ1?žËñžóäþý°o]n7Ñ®.Iâž]»Àjåê|€Áp˜ìþý8êuJv;ѱ1º{zx­¦È¥Ååk…9Z›Œ5MÓÐuZ½ÎâÄQM£oùr’’„·qÜõÆòüõTàš¦™-Ìr(Âf³‘L& ƒm¦ùÉ£\(=~½R¡âñàïê" áq»)•JÜvÛmT*®¿þz:::8e‡C˜Ÿ˜0§§§éRUæ ä|žP3»Ü4MTUm“F$IÂÿ |¹^7S³³Dìv®½új–’Iî¿ÿ~|ðA–/_Î%—\‚Ëá 1>ÎU• §+ËTuˆ¦QÎçYÊfY¶u+µzøÒ=’„ávSÅp%§“x¹LG6‹ÏíÆæ÷ÓãõRœ™!dµ"ùýØ¢Q´p˜›ËiDÝ+IlÜ´ ]×¹îºë¸õÖ[™˜˜`ÕªUí ÅpƒaßôxÚyë¦ÅBŒFTR]éˆ ÔÊe^œžæ`2ÉkÖpà 7 š&³ù$7ÝtŠ,3uø0Š$ñ¬Í†¶|9N‡­^§”J!Úlȵu«¿(" ¥B¨ÅB$fw&ƒntƒÔU3™$êt²85… íë£>8ȺsÎáx6ËGn¿´ Òuzº»Ù³goûÛùÔ§>Å7¾ñÿ—½÷Ž«>óýßgz¯’fF]–l#Û²lƒ11%1JàKÍ’p!…ìëwIÙôÜMîMÙMHBv„ÅdÓ Ê.˜¡°1.¸[’Õ¥™Ñhz?3gæ|hfÖS“½Ù?¯—_ÒKfæœçû}¾Ïó)lÚ´‰ºø†ßï'àv»q8$“IÂá°øÌg>ÃP_SË– ‡‰…BÈŠÂÁÙYž9tˆ@w7_ýÚ×p»ÝTe™@k+æj•¬V‹Áë%–N£ÑjÉår$ œN'§^ñ4Dêk±ƒiQT‡‚Ó‰/àlݪB­)ÅÔÊÿcH¶?g¤¢QòCC”³Y&‹V¯Æív£ª*sssX­VœNçQxI’¢Á ÈF³oMê8 ‰ïÿûtwwsõÕWc±X^ó»&“IݳG$§¦Ð&h¢Q‘þŽEA£Ñ ªê‚¥íQvoUǨÓðùˆÕÈMMMäóù†Ôp[G;ggÉ/ZÄilßµ‹X(„½Tâì®.’ñ8E¡’Ë‘Ôë)U*h«U4‰NE¡Y¯GN§‘s¹w·›Ér™®C‡xHQ¸êòË tv2²³³—ËÅÜÁƒüä'?Ál6ã¶Û9ï ùô§?Í-·Ü‹/¾ÈØØ}}}Ò}÷Ý'$IÂh42==ÍSO=E¹\æÃŸü$ýÈG¸÷¦›ø÷Ÿüo>Ïn`Å’%˜Ìf ±Á|%•B²XÈttà°ÙX²d Ç9:t«ÕÚØ½S©‹›Í¶àïV[Ôõz=&“ ¥X$£1É‹DskëÛæ”ŠEbcc¨¹¦@àóŽ%øŸâ¬-„ãªz=ªV‹½vÔåwÜn÷vçÝ>iŽÑáabÁ -¥wßu«W¯æŠ+®àœ)[:;‰µ´`ªT2‡a÷x2Aªªb·Û*¤‘$©˜Ï‹x±ˆR(PÑjÑU«lÙ²…û^Ïõ×_Óédjj ¹Tâé§žâžgŸeéð0+—¹Ïdb¶»›N«S:Íl:ÙíÆR©PÑh(ärØu:¼Ù,ÓccøNZL&¶e³¬ÎåøjµÊgf˜ÏfÉìÚÅ /¼ÀÚµk”¥‰TŠÀä$®={Øxß}|üŽ;¸æ#!‘HpÓM7±ÿ~qèÐ!¦BS­™Ÿ'PÛÕ?pî¹H’ÄÞ‰ ÎÅøˆ|ÛáÀtÜq$§§ILNB©D‹NG¨R¡eéR,ÞÎN,V«4==-¢Ñ(­­­¯j’ÖAFÓ×J&“çæ°ÍÍ‘Êfqz½4©*áéi²kÖ0‰ˆšWw£9wTÙ.Ib|hýä$­n7‰™Jµc×±ÿsŒÕ$IJF£"¢3›±h´Zb±¦.’ÿ†«¯N§“öîÛ'~ñ¥/á=p€-ÿó›ßäÊ+¯|ÓòÌêpà ’HЪ(ÌNNkm¥}ɪ• :Ç‚©ÁQŸÇlµJ¥rYLNN29<Ìæûïgdd„3Ï<“sÎ9gÁR9™\°ÔÑéøÛ+¯äɦ&ÜCCœüAQØV;Ž˜5üZ-FEAÖj©h4ˆr§ÑHÑ`À`·ÓÑÔ„ÆfcqS/ñëh”V+ú§žb>A’$-ZÄÈÈ—_~9wÝ}7gF"üÀ Ãüð |øCbpph4ÊÆC‡ŽŒÐ¼aǯYƒ,˨ªÊ 7Ü@FQˆÙíÌdÈÙí˜l6æÊeIÂX(`ÒhÈ…BtXÌfRóóT*¡ª*…BX,F[[[c¢N§çó¦¦&©\. 
[±ˆ©£ƒ`8L¾P "e “É/$©¡ÝVWÙ©;ÔüçdRÂlµR5(×(£šÚQàX‚ÿ™Âéõ¢$]3Í ×ÆA>Ÿï-ŸŸ¶<÷[·ò)Uåÿ Aó[ü]­F#ås9‘¥8ñ úûû©ßàÅb‘h4Joo/^σ^/3ªJù„P­Vîåx—‹–ÖV Z-- j¥‚K«%§Õ2)IËeìË—ãïꢷ¹™ÊâÅÄ…kŽ?ž§Ÿy†G~¸1Þs¹\ pA>Ϧ›o¦"/:¸_|ÛÁhÄpÚi§±ùÎ;ù¤BF#Àét²qãFÎ=÷\NzÏ{x஻شkëÖ­cÿÈ3Z-í‹£ 31?l6£j4DÓit©ÕZsÌ`0 )‹B¡P£Ÿ’N§Éd2̃":1V+ÑxaµâèêÂÒÖÆª¶6¬ÇÛ¬\¦T*-˜bÔi•JCm WOz³ÙÌD:Íx$B §Ã[Ðt;–àÿ•»¸F#eS)¡†B”âq ýï}ï[æWUUlzðAžhiássl×ëÈçßòß·ÚlÒä𰘇‘ ôñ8s££t-_þª³¿ªªb~zšâÄJµJÞá`×þý<õÔSôöörÙe—ár¹ðûýèt:R©…Bx<ŽÙlF’$¶lÙÂÈÐûÕ¯²þ”Sèêè`ëK/š™axËF‚A:—.å”õëqjµ  Å"Û÷ï' áw¹,] ŠÂ‡>ô!Z[[¥X,&6=ôK—.%óüóÏÓßß,Ëœ¼~={öìážPˆ+`ëÖ­L½ô®±1~üä“,*ø(ð"ð­PˆÑ`“ׯ糟ý,Çw\ã½Ëå2wÜz+¡Ÿÿ‹dËeÌ6îJK jk+Å‹qÕëRÈF£‘C‡Çñz½Äb1âñ8.—‹D©TÂc³¡UUª•Ê[š•‹EB5eÛ/Éèþý\ìrñôïÏSO=%6lØð–Ðu¾ÎN’½½H²Œ'—#16F¾£sWWã1‘H„7ÞH~r’V¯—ÑTŠªÏÇ5×\ƒßï'‹ÑÜÜŒÅb¡X,’L&òÍÍÍÍìܹ“]»vqÑßü K–,¡É&&&0\öÁÙ°ÑÑQöíÛÇãÛ¶Ñ¿t)ï;ýt^~øaNÞ¹“Ë…¼ùfÄ—¿Ì9ï?---<þøã Ä݆ H$¤R)î½÷^>øÁòÑ~”ßr z­–ééiÌV+ÇïÙÃW…Êç¹Ûáà[Z-» ƒ|íþ‹.ºèU°[F#;n¾™F£ìûÅ/¸âŒ3p77ãio§ÒÕ…ï(,2UU…^¯' b0X±bÑh”jµŠ¯¥ÉåÂg4’(I‹h5š… 7Ø^/é+Õ*åL¡ªXt:Š5|ıÿ3‡Åé$f·£ hšš0¿‰ÔmµZm Õ\.v»ÖÖÖÆ…ß½{·Ø¸q#™Læ-ùR™ÍfinvVLÏÍa­V)&“(‘ÕþþF3ð¦ï~Ó-·°NU¹Éjåä /dý9çПÇï÷cµZ¥y$iŒû†††Ø¶m—_~9mmm G‘p˜R©DWWW²nÝ:Î=÷\¦§§yì±ÇøÁ~ÀØœ]“V«U¬V+ÝÝÝèt:) Šn¸@ @>ŸÇëõR(8ÿüó™™™aÓ¦M¬\¹’%K–0>>¾  s¦VË6EaB§CòûÙÙÕÅî]»8eÝ:>þñ£×뙟Ÿ'_(ˆX,Æ–Í›¹ù¦›«*ۀ݀×l¦Óë%žJ!ôz:}¾×MÈjµ*dY&‰ ×ëñù|„‚AfwíB—Ï3Ëá^¾œ—‹t6‹ÞéÄîv¿­*°þ}1ŸQE!–ËahmÅd6Kð?w˜-ZV¬Xð¿¶XÞ°ó]*•Ä\ÍnØçóa: ÑdppP·Ür étY–…éMØfÍ­­¤—/GQU:m62Å"¯ìÜÉý÷ß/þéŸþ‰'î½—Ï©*«€fw©Ä‡Æ20Àê÷½‹Å‚Ñh$—˱{÷nÚÛÛ±Z­_¢Õjq8èt:©P(ˆ /¼|àlß±ƒ~ö3Îî9ÚW¯æÊãŽÃ^ó»~ôÑGéééazzš––r¹‡ƒÞÞ^/^ÌÚµkyꩧضm±XŒÞÞ^œ.“==|&b"#?6Æ{O9…&·›ÞÞ^l6›”L¥ÄÖ?üý/½D$¦wp^s ¹K.ṇfÇ®]üÅ‹q·µQik£ep«ÍöºŸ¯V«•J¥’(•JÌÍÍ¡×ëÑk4x… ¥½„ªbïêÂÝÜÜP®}§d%£ÙL`ùrªÕjû~lþßáþVJò\.ט»\®7<§/Z´Hš››7ß|3·ÝvÙlVkç#ÙIŠ¢ˆd"Á¼,³kjŠB4ÊL¡€¡© ǃËåâ}\À¯d™û¢QŒíí˜ |Õ*á;É Ð³hÑùy^~ùeZ[[iooçÀüîw¿ãòË/gñâÅD"l6ÍÍͨªŠÉd"‰àñx°Ûí˜L&)N‹h4Zÿ™T©TIJeËøî÷¾ÇÔä$÷Þs‰D‚íÛ·‹;3 .¸€—^z «ÍÆüü<6lÀëõ …x饗H$\vÙeLLN²{÷n*ÓÓ´É2I»wK ù™^|ñEª• Ž[o¾Y|ýÿoœ™ 'õöÒ»aM'DóaÒ/~ñ ‘O&‘ÛÛiv»±Õœ7L<£QÊårbff†á={Ðçó(ù¿ŸÊÜÍù<¦ÉI¼z=ÙTŠ™çŸG‘er5sÃîîn²Ù,÷ß?,^¼˜L&ƒÛí~i¦®8[Ç%“IQÇÜ[­V©R©ˆH$‚A¯gåŠtwvröÙg³uëVþùŸÿ™ÞÞ^^yåª5›e½Á@KK Ï>û,[¶lÁíñð‘|„޶6 ù<×}êSœ4;Ë:àGé4ígE!Ÿ'C:ÍL¡Àb£‘‹.¼«¿VKº¶£6›b±ˆ¯·÷m%“Íf“B¡oÛFkm·Íz½tö÷cü#JéB6K*D«×S’eñvtÜŽ%øŸ!êÖBÚ¢^Öùýþ7,ßn·[šžŸ;ë,>qà3’Äg6ofѪU8º»»yÿûßÏ¢E‹p¹\ “jµŠ¢(FY±X¤¥¹™îÕ«ÑW«ÄÇÇ mÞŒ¶\†;xåàA¼ï?ëjv;7Ýt R©Thmmm(‹ºÝnt:ñDbÁ]Ån'“ÉP(ÇUUE,#ŸÏ£Õjéèè`Ë–- ÒÔÔľ}ûX±b¿ºãª©§Ï΢êtüäÆY±v-œ>@ž›czzš®–.ܰ-;wR.•øœ¢ð›gž!8l6ºü~ÌB`·^¹§‡d&ƒÞáÀyX¤*£““ds9aƒÒüha·Ù¨är” ªv;Žþ~ô5í¹R©ô*nèá –#VëßW*¦wíÂS( 1HÔvôw;Iå/6ÁE‘‰ ʱ²Á€ÁåÂãñ¼!àäÍÂ`0Ðb4²¨vs.êìäË_þ2ÞÔšTg¯Åãq$IÂd2‘ÍfI&“d2™F9ÝÚÚŠÇë]è`<ÛfóÚb1”`\g'Æ ¸í¶Ûe™óÎ;£ÑHWWÅb±Áv¹\LŽ2¿o‹…”ǃÞhl,`ªªŠT*E.—«»¬°dÉžyæ²Ù,>ú(çž{.+W®äù_¤ðÒK|©R¡P©ð5UåCW]EynŽÌޤgf°{½Ö¯g`Ñ"6Úí¬)•¸ *xÔdI"­Ñ ‹<²icÓÓ,=ýtš–.ÅëñàÍ嘞žƒ±±1~öÙÏ¢Ù¿ŸïÆã¤Òiá:çÿ&Ç1ümmU%V.SI$0F" ¹ã£E=WöµR­"×<Ûô’D¹RyÛ:nÇüÿaäÒiÄì,vY&Íâ>ùd\.×µ7y½œúÙÏòÃý­×Ëåý(^·í!BÔ=ÌN'BæææH&“H’„ÝnG–eÜn7N§sÁ¹Ö¼I$"¸};ål³ÙL.æ–o|ƒŒNÇ_L[[ÛB ®×SÉfÉår,Y²dÁ~ix¹¼àMÓ¿n†šÄQ.—#“É4Ä„t.h¿±mÛ6‚Á W]u¿ùÍoXµr%˜žæÇ±e kíZ4Àô®]4ŽÒ*IŠE¢É$§§¹8“!|x\§C“Á@*E.—©X­”·oç·[·’s:öϦšµS&á“{÷ò!!øüÝwóv;7nv»›Í†Ãáhôê ”:Ü`0 „ÀÑÕÅ›7³cË–d³|låÊDùÈ÷H¤Ú‘½UU…U§#;=¬×ã] ©+ÑÿÛ6Ü´ZTIBo2á2ÿØC«•*•ŠØpÎ9d¢QŒ‰“›7#š›1;8¬V+ñx¼ar`³Ù0$“Iš››Ñëõ îš.W£3ëñx¤á={Ä­Û¶ݺ•„VK µ•U«V¡ÍåÈ ´ÄççÉær˜Ífìv;ápE4ªŠÃëÅîó5„ …‚ˆF£ †«­ž<6›_ýêW\yå•d2FFFXµz5mK—âÚ°Ðij™¡GE75µ06²XÐ-Z„upŽ&~úSB!~íõbìê‰`•e¾Y,rð¹Bž³Ïf}?C33|ìc£³³€p8Ì¿Üx#ÏïÝ‹ 8d4☛ã€Y|ß]ï-Ô©º;«Ýn'‹‘Ú¸‘Ï$“ܵkä‹.zGIy¸Tr]…çX“í¿qØʽ½¤ ,^/Ö?‘yœN§“Eåéi<Õ*Ñd’Œ$ѽd ÁØØét§ÓÙð®…Btvv¢( •`Âr„ŽÖn§04ÄwS)ž¶ æóŒ?ú(‰hõ¸ãPãqR• ËO; Y–) ”m6¬.ö¶6¼h$I*•J"‰`® W˜Íæ¯Z­VQ+TUepp¹¹9Œ&‰h¯^Ϻ¶6ÂÑ(ÉÝ»Ñ{½tøýH&厺N;Ž¥Ké‘$¢ßû?¹á:úû©„Bô-_Îø–-´^Àfµ2R(°véRŽ;þxžxâ >ñ‰OL&yì·¿¥Ùïgþà÷.éñq>°~=]]] ÖŸ$I Ž"5´™ªª ¨i.—#™H0´w/æb‘ÀQ.# ÿåS˜c þß$´Zí‚q 
P\ø“]–<ç®\ÉÄÞ½˜ôzºÌffR)mÝJNQAßÕEÿºuttt,,fBUUE4E·`ÜP’Ñh4Xm6âá0[·b-Ù»—'ž~šb<Î)Bp5ð£h”øŠtôõ‘O¥º»ñtwÓ70@K Á‚*íÐÐ?ýéOyyËÖÌÍñK!x,â¸k®AÓÖFsk+gžy&©dn™: IDAT’‘áa¾÷½ïa·Û±Úí”¶laÙ‹/²ÙéDûñc·Ûyï{ßË<ÀîÝ»9çœs¨T*$ ªÕ*§Ñé6T+B`Fé h9ÿ|¤Žvƒ<´iíí o÷x<.êdŸc ÿ.LðÿÊ0Ò\0(ÊN'NYÆ­ÓQ™´X0Ùl L1“ ¿ß,Ëèt:”TÔf³Q(Äèè(©TŠöövöìÙC{WË-8kljù÷¼‡Ñ矧 a³Û,VÒRŠÅ¨vwƒdÓi17:J®TBÕhp{ÿþïÿÎà‚½½½Äb1‚Á @ Ñ¤œŸžÆ–Nc)(jµØV¯¦såJVÜpÃ ìØ±ƒK.¹„bÍ3½nä¨] ¿*á%I¢Ë!çrXLoq,ÁÿJÂÓÜLòøãÙõüó(ö©)‚²Ìq§†V«mÐD“É$v»h4ŠÑhlܸ‹…åË—S.—yâ‰'¸òÊ+iòx^uƒåóyAŒŒ`0›‡áÐh0·µáÒhȦÓ¦§ÑF£ÌLLànn&%‡ƒ¬ÙŒ5 (ËÜþ…/ðÞ'ŸÄ*IÜ‹‘~m·³Xövt0îñ°ì¢‹ø—_ŽÕb‘E¡PˆÙ™žxâ ž{î9dYÆjµ¢×ëI&“TU•=Ë—óéùy²ýý\60@Q–1íÝËYcc<+I¼rÆ|êÓŸæÁ;ïÄtÏ=¤o›->ÙÔ„N§Ãf³ÑÕÕÅúõëyðÁùô§?M±X¤½½r¹Ìää$‰D‚&‡òü<&­gO £‘î5k°ÕfècccâÆodÕªUô÷÷/Ñj3îr¹Œ,Ë l‚F«¥R*QÃ%IDN|+VÛÁņÁ`*Õª(äóh_~M:Íh¹L!Æh³áóù(—Ë ºc8nˆvttÐÜÜŒV«•~ùË_ж¶6Ö¬Yóš¿aµZ¥ªªŠ@_ó““L8@anOk+æÖVtÈ2v“ ·xkî¥r*…ÕëE2IÈœx…`â=ïÁ·lùBSO;O­XÃéÄa³aÐë¥b±(^yåî¹ç¶nÝÚ`²õõõ188H à™gžA«Õòů~•R¥‚ÍbÁa·³s÷nñ8ïÒB5›á0¥‰ ¾ €Ï÷õá;ùd~zûí$ Ö®]‹V«åŒ3Î`hhˆçž{ŽóÏ?Ÿx<ŽÝngùŠggÙõÔS4—JÄ â²L÷)§¼j:ÒÛÛ+mÚ´IÜyçäóyñzŒ±Ê‚³ãã¨É$Ž–ÒùÿùÏ¿.&^{—¼kùräb¥TÂ\›-[[I„B¨mm“‰Šªb6›inj"ªªÌƒ(«Wó•tÑÒÂy_L¾P ‘LrÁùç7ž;sÿý÷‹}ìc¼òÊ+ØívV®\É9çœÃqÇG6›¥¹¹NÇo~óV¯^MWWWCZ’$N<þxv_|1×?ð9»K׬¡ßngéÀ?C*•0¶·ó¹Oš—¶lá?ø+V¬àÄO䤓NâÜsÏåß~þsõöÒd21º};Îövšz{é ‘ùB»Ã ‘ôùK%!±0ß.—ËìØ±ƒM›6QßÁá?gßõ¤Ïg³BS(/— çrÚÛß”}ønŒcM þ“'\.—)‹È²ÜÐ:/•JdR)H$ÈÏÎÒaµ¢Óë™*h$#ËT*Òé4f³™÷¼ç=D®T*âÆo$pÕUW½ãϺZ­ ¥\^ ˜f2”Ëe”l–J±Èd"ÁT(„ßçCV,f3^·›Çy„?<ò—]s kO=•ß?ù$O>ù$™L†U«Vqæ™gr ',8²X, áÈB¡ÀÈÈ_ùÊWøû¿ÿ{Ö­[×hèU*´ ³““¼üä“xe™¥MMxz{™×ëIמgüÐ!FGG¹îºëxüñÇÙ¾};---$S)Ú|>vÜ?&‡]~9ïd<¦j³aÖëÉçóÒi4 s¥®5kh©±êê ÅÉÉIn¿ýv®¾újú—-C­ÍÓëMGDGFp¥Ó˜ŒF¢F#='žˆå­üc;ø»(åšd²e9%U«U¡(ÊBcg~žr¹L]5´FÇ$N£×ëYÚß¿€ ‡É3¾m¹`±gž!ßÓ×_Þ@p¾KïØ±ƒP(ĵ×^ûGwõüY±X{öì!–JÑÞÖÆÀÀñxœjµÊ^|‘‰Ûoç¡ìÝËÏY·~=×^{-'t>ŸƒÁ år9166†¢(ôööb0¤d2)}ôQE¡³³³±3jµZTU%:6†av–EBÐÝ×GU§cV’è[³­^OKK éO䡇â–[náÒK/%Ó×ׇÁdâß¿üe~4?Ï¡ÉIîÑj±Kéd’&§“¦žr6šÖVÈf1öv ’$Q7dhkk“6=ô¸}ãFÖ,[ƪ“NbÝI'Õ¯5•j­^F’0 Ø]®£ bKðwIä³Y¢Éç‰Øl„Ãáì´.Çër¹¼`Y–E4mWêî—ªªŠyŽÔ‹/ÒZ(Ð’N“(—Ñ•jõUê&ù|^|ó›ßäüóÏÇ}I¡?&ŠÅ¢Ø»w/‘H„–––ú‘­V‹,Ëd²Y\ù<«€må2­–SO;÷œtÍÍÍH’D$Éd’¶¶¶ÆL ›ÍràÀzzz ·5ä™N§[ðTÏåèðx(ù|Ä$‰¾N Õï_Àz×Ë@ ÀE]„ÅbáÞ{ïeÙ²eìÚ½›ÁU«èêé¡4?Oxe|ÇsÏá3™(çó¼°m§jvF’$aÙ¼ÇÓx}V«•ûî»OƒAxüqšî¾›û;ï=÷°zpP:ìš‹äÔUÅÝÙ‰î¯Ô^ø¯"ÁsÉ$ÕHƒ$QH¥0û|4µ´nr×Hàt:-ê& u­µJ¥"J¥±XŒb©„±³“h8Ls[³™½O?b0Ð]ƒj<ù䓘L&N=õÔ?Y †»wï&•Jáóùp8ø|>r¹\æq8¸Ïíf\QÐwV·›ï|ûÛô÷÷ÓÝÝM{{;Ç<+V¬h àdYF©TÄwÜÍf£¥¥UUwI’Ðéõ˜Â33::pöôкbF£Q’eYÄb1"5ÕÖ@ À¹çž‹Ùl澻醴»›ÍÏ?Oÿ>À¿55áioçoL&ªÁ }ýý4ûýô¬[‡»µjgí|>ÏÔÔÑh´ÚËårLNMñòK/qR&ÃÕÀÔÔ[^z‰ùùy i4¨€§&`©×é^pG}Ê¥ÒÂcŽÇtÑÿÒJób±ÈÌää‚ÿ¶É„§­ ßQ\G+•ŠˆÅbȲLss3F£‘R©D4ápX€Ÿ¼\Àì’%$ggQC!|á0ÅR‰Ð®]çùö·¿Í'?ùÉ75@|+Q©TD<$3#ÕT^L&---˜L&)/ØG"ìÚµ s @Ç…ÒÝÕÅùç×…Õ©­÷Üs›6mbùòå,^¼˜-›7³õ·¿eç¾}t,[FoooC•´1UÐëiééaV«Åãp`¶X%¼Éd’ªÕjƒI&„ ÐÚÊš ûöñÊæÍh$‰‘ñq®þüçY·n?ô÷_{-îGaÛi§ñË.{¯[µZóóó˜ÍæW‘‰††‡ÅMóó|m×."«V1ÐÒB8FÍfÙ,— W Ðèèt:¢Ñ¨Ðét yêÈÔå`ŒFŸ‡|žU}}ô¹ÝÜõÌ3l;tˆßÜv-ªÊ¾_ä’l–‹€ÏnÝÊoŸ|’x<.'êÔL‘H„l6+ìv»°dñb¾ôë_3:>ÎÒ¾>,f3Ãû÷“ß·þÖVª²¼@¾q8sòú×B¡°ÀÔÛ³‡nƒ£ÕJvnî]É×%¸ªª"ŸÏ‡1 u]³W]´r¹,榧QŠE,Á`°áÛÇ6Df³¹Þ¹=êE·;øŽ?žÐ¶mt:Ò:#O=ÅþøÚW¿ú')ùªÕªBÎfqX­F¬ :®nº(¥Ói±wï^„ ãr¹èëëÃl6#Ë2¡PˆN;í4)“Ɉ‡~˜7ræ™grõÕW37?Ï·Ÿy†®`ªªRÊçÄJ¥Bpf†Ð؆b‘òÜÕhÓ¢EäÊeä¶¶×¼f—Ë…Ûf#¸cåb‘äÌ -&ç]tŧŸfüÉ'ÙâtÒÛÑÁ3~?Ád’Ý&òsϱkûv¿ûÝïÄÀÀMMMèõz)—ˉx<ÞÐÍ;Úg›ŒFÅ|&C¹X$£+qºÝÊá÷ˆ¢(HÉ$"‘ ¯ªè-–w¥Cé»*ÁË岨wÄk.£¯I2U1¾k™çž#ŸJv»é?ýtššš]Ú·Šk®S[::x졇xâúëQ‚AÜW^‰Çëý“T!óóóhM&”–¢é4ÖŽl&“ ³ÙL6›;wîl Ð¦¦¦xÿûß‚R©D V¦Ö*QÍk׮宻îbûöí\qżïïÿžoÝp±J…fŸ@Äc±X¾ü2æÙY¬F#®ÎN,ªJ¥XDRÕ<÷ˆÏ}±ÈR‡ƒ‚VË!IÂ`kmåüÓOç÷ãã<üÐCœÿñsñw’ˆÇù›Pˆèü<þð‡™œœdÛ¶m<òÈ#477³iÓ&‘H$¨Á×ó³»Ý(+WRŒÇi±Z©¨*¡Pˆ|>/j‹µT¿n…|^dæç1 ¸jòÒÇü¿aT«U‘Ëå‡Ã˜ÍfÀkVíÆcU•|(D(éõ”$‰¶¶6\n÷;ZÁ5TV±ý©§¸jhˆ%Àÿyþyb‰Ä;ª>ê -EQ˜››C«Õb4iéë£R©àp:‘kì±L&Ãää$‘HŸÏÇ“O>IOOOCMµ©© »ÝÞ wäkFõ¼X,ЧŸ~š7²b`€3¿þu¶ïÜÉîÝ»I§RX-"è'&h×éPÊe 
’DÙçÃ`·ãokƒ£|d’$a°Û‘ÒiwW’ÙL2›¥ÛdâÃW\ÁÃÏ>Ëm<À×Ö¬aÑÒ¥¬?åøÿ`Ó¦M\ýõœ}öÙÄb1öíÛÇ®]»xúé§±Ûíøý~Ö­[G±Xf³YRUU¤3Jå2hjkC´¶6è¨Ùl–x<Ž^¯§X, £Ñ¸ÀCtZ-–žéÝ,Ýôù¦*•Š tz=HÒ«˜I‡¯Ô¯—DÓCC$^xÊeŒƒƒ,= œòNã[_ýª(þã?²8xñÅ|ý¿Àép¼åçT…¹TŠôì,Ѝf3N·¥\nhŠÙívòù<‡EQ…Bd22™ Á`‡zˆ‹.º¨¡ðZÿ­VK±X¤³³³ÁN;üoC!qǯ~Åïx€¶‘(•ð^~9çœr šDm(„­\&ÑÖFÏy硱X>{Àj±àóù^ó>K¥’HÏÏ/¼v!˜ÁéráI§©ær$´ZîÛº•§ÿõ_9UU)|2wãlzðA´Z-ŸúÔ§hÀ:+ohhˆ;vpàÀ¬V+ƒƒƒ8Nž¹í6JÁ ëÿ×ÿâƒ×\sT5ÜL&³0iPÔD$áèîÆÙÔÄ»Y—í/n¯V«"2>¾ m¦(H~?-À›Ê#¾ãV*aóz©V«¸ÔžžžÿôÝïâûÚ×].®»è¢·•ÜÔXÉñq¬‰ÉhÝâÅ$…B"Ñå¢ÉïoŒ«Šµ3f|R©TxüñÇ9묳X¼x1ù|§ÓI.—k$¹ªª ÁÆ#Ãïóqú‰'²ùßþ¯$“8€/>û,bÙ2œ€}Ù2JF#½«Vѽl‘š$u<GQ”£Ž êÌjµ*æÆÆ0g2h5²6“ ÑH{[—d³|®Tâ;¿ÿ=/¼ø"W\q7Ýt·ß~û‚„µÑˆªª¸\.N>ùd©¹Ýºu+£ccÜuë­üÝ–- ßýá9é}ï{M _¯ædYÃ[·b‹Fq{<$§§±½ ã„c þÿ¢;^.£D"x…@ÉdÐx¼Þ·µ ÿ± ýªÆN2)n¸áN;õT®¸âŠ…êá _xÛÏ#„@T* ˆFM§ñy½4 L!úû©ØÒé4j¥B2•¢X*ñ‹/âmjâŒ3Î ™N7Dív;©Tªæ©V«¤R©×$@j~K—\rÉÕ57 ÒǙ۽J%Z-vUÅX"ò¥Y› ±bÅxœìØáX ÕbabÇþöÚkQd™àþýä"l²Œwñ↌”ÑdB®É:`U©ªŠÝbáʳÎâ1§“˜ÕÊÙ\@óà Ù\—χ}AoN*•JBÑX4R©Ô«<¸²˜¢±ÛÉÅãÈÕ*‡£QB' QüÇdz|œ³—-cýúõ(Š‚ÛíæsŸû?úÑØ¼y3—^ziã˜R×r3 $ÆÇ9±«‹-—\ÂKñ8M:ž}§Ó‰»½ý¨Õ›\,Š„FC±ZÅÛÖ†®¦ txoâX‚ÿC¯×KÅBAdãq¬F#çÏvT¸ûî»I$|ñ‹_|Û:ìGMr«•¢Í¹íd$‰™¡!bCC8[ZÀïgæàAJá0F#6‹…M=Æ:¿ŸfI"}ð ÖC‡pV«ØÓi²Á ¹p˜ùx×¢Eh,ªªJ"™¤X,Šš0‚ähj¢ØÓƒ1ŸçT§‹ß¹»o[EA®Y—ËeQg•i4¬V+‘H¤!úøF×+jµR*°v½\.^tr©„A¯§\.Óäó‘J¥¸ú꫹ýöÛ±ÛíuiéÆg\’eÑév³¨¯Ž¦&¢ æb‘g{Œáûïçc==¤Ói¡Ñh’Òõ¾ÞÞ†»‰¢(" R‘er™Œ°½Í£Õ±ÿ/³Åòg¿O?ý4/¿ü2_üâq¾ ½ï×k&“I’É$ΞÊ–E‹ “!71I’‘Ñd’.I" ’q8845E*âäU«ˆŒcÓéЕJ Rù<ù™¬é4ºBéùyp:ÑêtP­ÒàY@« N'UUUx:;:ps­‰–I§ñûý(å2†h4ŠÕjmŒ뻹,Ëoz½*•Š‡Ã QÊúŽ©ªªPj€¤ºÕ¯Ïç“ …‚Ðjµ\|ñÅÜqǯQÍÕêõZZ¨”JTªUz—/ÇårѺv-÷=ø ?þñ¹öÚk´ê…Ãi¥õDŸFÅÄК©)\f3ñL†²¢ÃëL`Ž%ø_IìÙ³GÜzë­üÝßýíííïøfPUUȲL8n(ÄtõöR*—Ñjµäçç$ššš¨V+mn7Íå2Sé4‡öï§É4ùàÿ~æ3,‹Å0¬\Éu?ü!‹ûú°9hµZ©®ŸÍf…Åb‘dY¿þõ¯ùÁ~@0|Ý×–ËdD\§#&ËX::tŽ%ø_g¤Óiqà 7pâ‰'²aÆwü<²,‹¹¹9„uÖýr£‘ámÛHŒ£dµZÚûúX|L&p߯ÍoùKB±W\=–5kÈ&ȱ…Dƒ^OI¯·›T*…«¥“ÇCÁá@ñùÐÔ8ðÙ®.zW¬Àh2¾è4$Œu:‹eA›½\ÆëõR×bÏår$“I¡“$&¶l!3;KÈl~SõúÈëhè·zO–eæææ^eå¬Ñh¤»w‹ãC!þO4Ê}Á ·¹Ý\u饸zz(—ËÂ`0Hù|^Äãq …‚0™L’¢(âÞ{ïåûßÿ>¢§§ç5¯Íb³ahP‡µï²FÛ± e´RëÜÞö¯ÿŠÇãá²Ë.{G~ÒÕjUd³Ù†Ä²ËåB’$æççQ…L2‰5›Åi63|ð ¦+XzÜqÔû‚ssâùþØ¿Ÿ]ÀX2Iç '€Ì8@\Qå2–ž¬ýý¬8ñD†·mÃ’ÏckkcÅI'Q*NOÓµà~$s«Ñª»¶ ²Ù,>ŸOJ¥RâÐè(mmmär9Òss˜òy&¥p˜rOÏ›6HçææDáu ŒF£T(ÄÈÈ¡PèUóu§ÃAÑïç…h”Cf3Ź9~×]d´ZìÜwß}bll ÛߟR©Ô(óÍf37Þx#ÃÃÃbéÒ¥Ò‘G„wóý{,Áß$¹ŸÿÃxìÎ;I Øü~¾þõ¯¿# h'_©ThnnÆd2¡Ñh¤t:-Âá0.— _ @!›%R5^•ÜIŽ /”°è´Z*ŠBÕhDøý$æçié页«K*—Ë¢°reCR#I¸½^JŠòªÒü°f_ƒt£( N§“d2I>ŸG.•D:!5<Œ®PÀß×GdxûܲFƒÍïG_32x³2=›ÍSû)•J‰¡¡¡Ä^¥"4 Áÿò/üáÑGéîèàªÅ‹q ÁT¡@T§#³sçNŠÅ"R ~üûßÿ^ƒAÎ:ë,L&?ùÉOxâÉ'…,Ë,Y²„¥‹Kð¿Æ¤®w\Ñ¿þÒ—¸bëVv…¯|åmÎn3›Í v,ø‹8p—ËE{{;z½^ Ï̈ƒ0xÊ)Ž˜åºº.¹„ë¢QNܰOô£Tj˜uos3þ¶6¦§¦ÐšÍ¨ª*J¥:£ÑˆÁ`¨»®H‘HD”Ë壽ÖFƒ­X,6Êô|>Ol~ujŠã,&$1;‹[Qð÷õ•e¨5çÞtb6“J¥ŽZ¦7Þ§Ë%MOO‹/¼@os3VŸ¦®.ÞwúéRUUBŽÇ)e2 x<Ø\.J¥étšééiöîÝËã?ÎC=Ô`ŠEnøÐ‡8;Ÿçwýý\÷óŸ¿ëïçwm‚W*‘‰ÇQd{S«õU ›ºVÝß» ˜››£\.S*•84>N>¤È;‹Å£"¤ŽLèb¡@¹XÄ`6S‘ðx< v[݆xxx‡ÃAOOÚšñáh[º”ž#v—:³Lo2qÚUWqõÿüŸæææå~¹\Æn·7ì?S›Íf2™ ÕjU¤ÓéW«ŽLp½^ªª!ez*™DW(Ðd³¡œÄX=eívœ½½T޶hZ­¶NüxÃÇ9¬V¬é4r*…&›%ët6ºý°€Ý§ÆE?Щî“H$ª·33œšNó…R‰ïÙÃî;%ø_j¤¢Qä¡!4å2“f3‘HDÌÍÍ5\-Ÿ‰ !P…rM½TQZ}>Ö]=_þñÑú||æÃ~Sm¯B.G|ÿ~D:MTüË—¿ŠÝVïž×ýź»»ÑjµRµZSSS(ŠBwMªøð³{4¥T*1;;Ëgœ ÐMZZZÒRuß®z\¯×7Êíºè`©TB¯×#ËòkÊdUUÎà5¶²¢`±XPJ%R££K%âZ-¦@€€ÙL$¦ÚÕ…Å ÒÙ,éÚòFý F#¥R)Q(^·L‡Ä¢¿µ•Èð0S¾˜ô‹ IDAT##t·¶R(…ÉhD£ÑHo†^¬[GÕ'K—.•FFGÅM/½ÄÍ;v0¼x18–à©QÎç±,Ì~(%“”J%lv;–šd$I¨ªŠ,ËÈ5écƒÁÐI0›Íx½^1¸\T…N8áMËóB&ƒ‹a§V‹Ýf£¢($æç…Îh¤ž¨†šì’Á`TUóóóÄãqúúú( `†ªª¢nÏ“ËåP…®®®†%Òႎõ{ÞjµR((•J æ™V«Åd2Q(Ò#1äu©è\2IüàA”PˆÀŠP*á.•ðX­ìvô­­”“I´.æ¦&´:r*Åüð0åînäbQ˜Þ@¢Øl6ózUÄáÝmG?“á0Å`Ìø8ÓZ-N‡|>/L&Ó›6:ëUQ0äàÎÂT(ð·7ÜÀÄÔ[±‚Á•+ßõ ®y·¾1{s3Y«•¬ÙŒ©¥‘LRN¥Ê£ñxœh4Ú°ß­i›!Ë2‹…ÖÖV¬V«¤Õj¥¶@€êÜŒ‡—…¥j•h¥‚b4bjmEÒh˜ß¿ŸÜ+¯°ÿé§IÆãh4š³>«µ‘À‘H„@ €Ãáx¶;—Ë‘Ëåhjjb÷îÝ´´´P­Vë‹CCŒñÈ2¸>s®ïÜB,Ë«JãüZ­ UUA’ÈÎÎÒªª8³YÒ33ØÝnJZ-éR‰l¥BKW­ÇOÿúõ "““(³³´ ~nŽt4úfÝtt:Ý"à´Z­d÷xèéèàÄ¥KqÊ2&“ÙL<'‰ËåDµZo¸ƒét’Q’(Žb 
…è4ÿÿö¾$FŽëLó{±Gddä^™U,î‹hI”܆zìqÖ¡-Œ}x.3ð±íkÚ˜£`x ÌaàAú4€0 Ø'Ã#µ!O{,«Û-I‘,.UŪÜר—oÎ*V‹I•ÅøY™ßû÷ïÃW¾ò|æÏþŒðOÓê'ÖƒŠEH/¼s6C|íŒÁwoÜ@ïÔ)4VWQ( ªjFœr°Õëõ¬Â½Pô9°ò›d0‡ ý׈£šaÀ1MH–ãÐÞÚ‚åû° ?÷¹,\‡( ¥qêS.²z½Ž8Žñî»ïâÅ_D¥RA©T‚mÛ;¼`JrÀC¡PÈÒôßeYΖ6ÒVXjéëžáy(…€ç±tìfɺg1éƒ'TÆ$e&m›&êÅ"BÀñ<î>F£}Ûe‹aºP*Ae Óé>¥(ŠºÓ41æ‡e±„6zÏï†#JQ„—HM=-ö‰x&acYÌ ptU…Òh`ii $y¨Ói2]×ï.H­^¯Ã4Í £‹–Àc;xÔÀuf‹°··!ˆ"VuNÚL03M6Jöº«Õ*dY&©GJÕR Ã@¡P —/_fÝnŸÿü糃)U^Ù¢§õEQ2—$¿&½^¥j¨‹E± à‚€êéÓ‹"Ç¡vü8$I"Ûwï²Q»šadc«éG)eëâÆú:–OŸ>ý‘¦iiʱ$IĶ,f ‡h‰"œ0L'ÿ2å˜dè&ýYB¹Eâ$ â!F½ŽÁÊ ¶ uyyÇpOð?qSTÊÉ“u»šM°8ÆÖûïƒ) eiiiÞ'ÞÇ;뺞àîÉõƒ€u»]ˆ¢ˆZ­vÏ!+ –ž{“fq©êºv:À;ïÀ/Ð§í¬½@^ÐubYãêu¬;†´Ux¿Ð>I3²îÃ^‡‚$I$Š"DIz¤|yˆþ'®ÜQ1k2†¸uó&LÓÄÒÒÒC;»Bˆ¢(¤±²BªKKD–eˆõ:¦‚áÔ)Ï>‹ÚÅ‹à\üÆè­[8×l"fŒy®ËF¦Iª¨*\ÏC†sqBÓÛÜı(B´¹ Dz ª*€™i"CÀ¶q²Vƒä8ؾv Jµ Ó00”$æ3ñv$%sL«è•¹zúý~vˆpGjµTUE§ÛÅt2aÓш-†í¢(BJ¢£´¯ßétà8»ß=L%žG£Â(º§.Q5<Íà~ê<øÃظÛEpã*£¦IHû(w{Ç(Š˜]¯C>w£õu@Q ‰"6ß}D±zì¢(Bm ÜÍ›pUcY/Iè^¾ Úïc4°µwßEØí¢uæ ¢t¨gsƒím˜7obùÄ DªŠ»˜ôû0dþxŒæ¥K ŒA8„ʦa°,+KYÒëO§ñRà§s÷ÖíÛ¸þæ›XÒu˜++£ˆe5‘$ÝÄqÌf³úý>¦Ó)+‹û^ ÇqÄó}ví½÷0úðC›Mø¾Ïõw“ül”Rvë½÷ ŒÇ¨«*†¾Ÿµ†ù±àiÆã1O&øÙ~€ÿûOÿ´Z€aŒN§s}î8†=™€ù>–ƒB¿Û[[¨Ä1lžÇÍñ«ù—ð†CˆwïB\[ƒÄóPN«~etº¢`4‚Ÿ/™Ã^ël6c‹ì,‹¬,ÉhmæÉÛ7o²%žGç1&5ƒE€/vÒvX†ζÇay2AÑuÁ…!f…BþÐ>í!úa,áJcý~¡ 0 °jöÊçTUÅ._Æï~ÿccŸýÝïð~ùKH’DäV cž‡¥iÐ ŽCÑ0 Jâ0D£TÂé³gQ?sÕåe°0DU×Q5 èš.ÙæZ:{¡a`"ŠPšÍ>¸’Í8˜¦‰…›,--a2cíý÷±}í¬ÙŒ)¥P*Áb (3U”Å‚á®÷&­V žçe¹ýžATMƒQ*Íë\þ8çü>†!Ku¼ …ÎêSˆ/\À6€k¿þõ¡*½ N0¾·mX\žG%EãÔ)¸Æ¼8§ª% Žáy?Ónˆc”Oœ€,Id:²‰ïgÏ‚Š"üZ ÕãÇ¡hÔb,Ž¡î!Òx˜´"¥HZ¬º‹’>à^»†r¥‚‘ë¢õüóà/]Bày(—ËÅtÚÒÛËdY&Q±áp˜Q8I’d”ÇŠªB?wãN¢®Ã¨×ó8øþáx*`¸XôIüÿ÷Î;ìæú:ÚÎc½ŽÙlƾ÷½ïáßý'þöoñ­wÞAã™gðŸÿæoî åÓhC}öY0Q³™þ€¹â¦úâ‹óƒƒçç?ÇqD,BUUH’”‘$&îÇ¡Y­BE¸IÿÝXØzû£þã`Î~©@º¿¾½½ âû(…ÞlîPJ-%3üüS4¡–üÂq×uÑn·Õjš¦íðhÛí6ûo÷wþs|g{·nßfgö ðû¨×qùòe¼öÚk¨T*øÚ×¾†z£×ó &sç{E÷ó¼s,“㸬'¸8Ž#ÖlÆF® ' ¡?ž6{GÙä@e”4ÿÞX[cÃ÷ÞÃjµŠ™iBIòm.uðý†!úý><σa(‹{æØoýö·¨¿þ:þ«ïã¿ÿó?ã—o¼ñH¯Å4MöÃþo½õ¾ô¥/á _øÒ^ór³™ñŒí5½õq›¢(¤ßï³Å¶™¦ëŸ>[^Ù„)áÆAÚfÙçÈ2–*h’w]öÜžò"¥”ÍÆc6êõ˜ëº« !ËË˨T*÷ôL“°BpË0ðhþ÷oàG?úëv»l¯}ì9d®^½Ê¾õ­oáÎ;øÆ7¾W_}•ȲL‚ € PU5°mûP=â'm¥R æl†öæ&Ì锥7I’ÈA–ã8rPˆ¾hF£¶ºŠªB>qÊ.¥“Üžr> `}ø!"ÛÆ-YFó™göÝ"£”2ÇqÐn·aYΜ:…Ï¿öÖn߯øó?ÇÉÓ§ñúë¯ãµ×^ç?ýilll°ÕÕÕ mÛf?þññ«_ý _üâSæÏl1åÊ•+(•J™²¦(Š0 #ÕBgG‰¬€¿Ó×é j6?ÿü½öP\QHD)‹)/y¾|ÁS2ÆÚׯ£ÈbŽƒO)êÕêG?÷vº­( t]Ç¿ÿò—Qo4²âTEl}}¿øÅ/ðï|Ï<ó ®_¿ÎΜ9s`+-Žc¶¶¶†oûÛày_ÿú×qêÔ©‡C`Œ¥`óâYÁ0 ض½³¨u,ð}è”ÎL'¸p}‡8€ûòáåö”œ#„Œû}6ëõ ( Œ]Þ”Ò¬ÐÆC±X̘UkµÆã1Š»ö•SÇqÌÚí6^ýu|÷»ßÅñãÇñþûﳋ/f|k1ãˆã8ì§?ý)Þxã ¼òÊ+xõÕW±×²DÚ¢K Â0„ªªÄu]Öëõv(|Ü&É2¸„o½gšXy€~tJx™ÛcŠ®ž¦¢šcY|²ªBžÓ2ÁqL§Ó,ÏMw¬Ã0ÄÒÒf³¢(ÂÒÒÒ!xº\ñæ›oâ7¿ù *• ^yål­¯ãòÛoãÔ¥KØØÚB†øêW¿ŠsçÎíù~”R–r¨;Žƒ•••Œú·V« ,´Z­#SE|ŸÙ¦ À´m”ËåC©®õz=÷ιÿ^W£ÜÜÄìæMŒLç_~îÔk ‡C(вCí EMEUI§Ûe<Ï£”´ÃÒ›Ülb²¹‰˜ça,-í0—ôׯþîw˜pŠËËG®X•‚<Žã¼k©¥4Õ‡c)³KzhÙ¶­ŠæaûSð(Š˜ïysR=QIà0 áº.ºÝ.Â0Çqe9“×5M­V ²$Å1ZçÎAVâû>ó¶·Ñ *ÏòPø~×’*›BP¯×Ñëõ2M°TŽ@J_¼o(ÍâÒ.Õ žãHD)sým5á:_´ÀuQ˜L@l¡ïÃJ¨…Zz”zò»wïbëÖ-ÖÙÜÄêéÓE”R,²Ã¦ßñd2A·Û…aGnD7øc²0 Ywm Q·‹HUQŠÅb¶† Ì·ŸËb³^’,¨¯†! |„ôz=‚€z½J)¦Ó)*• &“ J¥ÇÉ„÷zˆžés?¨©…¼jÑñã°ËeåEJÆŠ’^UAP×Íîºî=^:ý)¥!£ÀóˆM„1”WWsÒ.H¨,ó,hõ:Ê'O¢R­ÞÃtbš&: Ã@)ñؽ^/%tH÷‘³×iº¾C´î è!l·1ð<4Ÿ{­åeãñxNQœìA§`M£EQR]ì{rät¹…{ïkÛ6J­ôF¢(b:î+ ð±w<my[$MÃÊò2Ü0ÌîÑ~½ðô®ç±kÿöoP:4ªUŒ‚ ù¤\–eT/^„5¢¬ë0*ÄŒ±8ŽÁ%­~¿ß÷Ñh4 ª*8Ž#®ë2ß÷Q‹Èï ¦û…{žãIô@ƒrB;ä:|ßG³ÙÄh4ÊtÁ’‡3æ9óR:‹âbA°N§]×A)c ªªb0ÉVS*f8¹xÍf¥r™t:Ì&î'),Iª• dÇÄó°’ß9·OÀwWSã8f³á³ímPžTª¦ayy9£Žã˜õû}hšI’Èp8d“º‰’ªª(…T© ¤[—/£?bùüyDQ/ÙBK'Ïóê4Ý+D' Š%‡µÙlY–Áó|¶u–j© ƒ#Y˜ ÃZ¡½XÌêqCUÕûê°óGÌé”Ã¥0Nœ/äÔ‚Ÿ(€ïõÀLoÞ„nšØ¡<û,–öرö<ËËË™Ç|˜pX’eÔ.^„5ASUØ[[{=¨³º¾!€P±Ül"¢<Ïg¢(Êä{wRšÖ}-¾ï³N§ƒf³‰ÙlQÁó<\×E½^G§ÓÉFYR‹Éu]H’”…ÝÇR ]×1w¨žîe…bÊ¥K@º3ŸÙöw†Ÿˆß‚1 Ž! 
j• Š{ÐO§ÓÌ{§óa¾ JJÕ*c AÄÝ.–(EÕu1ît0ìvaM§y¥4“ïÝkFQ´£8x[§¥Éa¢ªj–4 8Žs¤¶Îâ8f¾ïC] \J=¸(ŠDE¸»4Ï÷úDA ¢(’¼7þ\”$è§Na¨ëˆCi׈©çy,%VÜã¥S§ÐWUXÕ*´åe(š^Q0ÚÚ‚wù2¢ë×1ÜÚB˜ŽÇY¹;LOú^¹®‹R©J)£”Bˆ¢Æ‚ €$I¤^¯c2™ÀuÝ#‘¨&[q;6ô!¯ù>Ö/_ưӹGo,·§4DO‡(Œ„;íSJ™cYèloCÓõ£§©WýHŸ›œ[ÛÛL‘efÝ.8U…Úí¢Q,‚2†­[·@=Ä÷!EÄjƒ^¶e15Y¬HCôÃzÁ^¯‡B¡ Äqœæá¤Ûí²ô) d<§R@û¼ºçy;: A°«W1ívq÷æM欯Cé÷a¹.ø^Èš|ÿÂ[sÖÚ†ã1NüÅ_ìø¿v»ýP!ú>…ð"¥ÐE€g7vað‚RÊf’„È÷Ñ*—1º}~¯‡ˆã@dƒ«WÁݹ{4‚u€BfÌsl;cX]<ÌÒÁä³ÁqÜ/.B¦BrØHá‘Öcûwa]½ 2c*IÙa[ª×1°m , ÅV "cΟGµV{dßOðO ñÉ@…isÎJež«3ÆËÂl@¸¹‰¾ï?úá½<øA&/|^̬Õ0•J¨ïqèDQÄÚW®€ÛÚ‚dšðëõ9M!w:è¾÷"Æ`$Þß!(‚;w`—JÙ{©Š¿×ƒ»¹‰R½Žè™g½5÷QÛq¿Ï¦“ 8J¡4›;fîÓp:²´[ð‡2Ç4¡:õ:¦¦ ײ>öC(æ9$Y†´Á#c q¾Š®# Ã9G¥°¶·Qö<0B`noƒÆ1›F% †ªb¸ëð‰ãŽÃØ4Ñ·m,-/Ï糕j5H/¾ˆ˜R¨…Â=¤q³n·›çÐÌþP¦ °5 Žçê:”=ö«Ô{?¨?L¤±h¢(’ÞÖ»½µ…zµŠÆ±càç­5Æ) ºÓ)xQÄ’¦(–ËΜÁp<†Z¯£Œ„óÞ½´´„cŽÇПTMä> ¢AdÜr€?”i…ðüóð, À¼‹`áaíqOKª†Ö§?ååå,¼åyž˜³ÛîõQDee%RÇ,^]·kcŽç8bÛ6«æ3(%óÜÃáðcgu]yxžü±{’'‘ƒ?,’}÷k¦Ž??ï ,\ÃA¿g†¨Õë`ha6›¥´RÈÇAΦúž÷ü<{œ§”Þ3¿½|mNy çÛ¢y Ù¡ÀóÐuŠ¢Àqœ¬Gþ¤ï]†,‚+¶¹å?–æàÓRº§Ýýë(ŠXPU²,Ã÷ýû4Š"†a¦t¼\.ò,L§Ó'ò<<Ï~ä½÷ãôàéz¹ù¯ RIDATåîÞp†™v¸¢(Ã0ë/ðì° „€çyhš×uÑh40›ÍžøöYžç?²æ:Æý>ËBü\yÇ,¥>ø‚ Ì·²î#ÿãû>DQ„$I„çy„aˆR©”‰6 L&Ìf³'âÅóð<øÑ·ë²ÁÕ«ànÜÀèòe8ÁóEQ„(Šîøî½jžçÉaòpÏó²P?¸(ŠD×uL&(Š‚z½Žñx ˲ظ‡ày>Ïs€=ó’ãàt³‰Bî;fú‘>Ã÷3:¦E£”" C( m>EQÌÃ)¥lñ5‚ dì)†ad"…BT«U ‡C8ŽóXAî8N>Ü’ühšZ( *—1à´…’Géá×+S ‚Çí¾,Ë¡Â~ÑÀâZj*8Ç1E‘‹Å¬ÈV,I¹\F¿ß‡ëº,!µ|¤`ÏÃóÇo9[ÝG0YQˆïy̱mTTõ‘ Í,’µÛmÔjµ=Ÿ-fÀO·È‚}ö¨Óh`‘å5xæÅ-ËʘMSáÀÍ È”BVUøžÇä8»Ÿ¦ÓÙ Ï?ñí¶Üƒçö@ ¯ÔjDÕ´GΖÌv€Rºgáçy²Ô"ÀeY΢ŽãæsîImPR4 t;L†C–!†ý>Ü>»u ÃÍÍÜF ÃuoÝÂí·ß†7ç´Ç9ÀŸ>‹¢ˆMÆã”Þˆìοã8ÞÁk–¥ ªºgÇ1 ‚`GÎNAð]÷J1üàôÿõ_Ñ[[CD) ²Œfµ çA}޳Ñt}MÏ×íÂù˜~r€çöDÍ÷<¶}å ¶ß~æÖÂ0Ì0ø>ëµÛˆÂpON¹´’î$d©Çìoma°½ ²"÷û_¾Œ»¿ÿ=f£À›L°*ËhÉ2è`€8Š`?Ž™®c\(ÀXYùÈòÄs=µr¹<ŽÌYóüi2s2ÐëᔢÀžN³ö[¬sõ*Ì[·À 8Æ=¯µ§SL®^EtçêÏ>;'h\_‡óá‡à- c]M¤•í­-œ”$0œ‹F$¦¦Á¡0 ˆ’ei …JeNhùDŠÕ*‚“'1˜N¡5ó Ür€?-&Ê2Q„Êó`¢8÷r˜“Upã1N”J°¢ÎtzOn¾}å Ž‹"Ä8†¹µ…b½ŽÀ4QSÔecËBœŠ,¨* ˈª À¨VÁ]º„Ð÷±T.ãq°°Š¢Hh"%¼{Û-·àŸxÓ ôSŸ‚;¢T­BM<œ¬ª †g2A i¨ìjËBÀ«*˜$à4 <ÏCo611M8†ÚlÎÕ@8ޏŽÃ¦šÂó¨µZOh|ê'bùM>¢¶ŸÊ©ë8ÌžÍ «* Åâ½ýqßg“n1¥(5›P5ÄqÌ<×ëÍAOî÷9¹å–Û>â¼÷”[n¹å–[n¹å–[n¹åvÔìÿÅÓðÊP™§ÒIEND®B`‚networkx-1.8.1/doc/source/static/trac.css0000664000175000017500000000522512177456333020300 0ustar aricaric00000000000000 /** Link styles */ :link, :visited { text-decoration: none; color: #CA7900; border-bottom: 1px dotted #bbb; } :link:hover, :visited:hover { background-color: white; color: #2491CF; } h1 :link, h1 :visited ,h2 :link, h2 :visited, h3 :link, h3 :visited, h4 :link, h4 :visited, h5 :link, h5 :visited, h6 :link, h6 :visited { color: inherit; } .trac-rawlink { background-color: #*BFD1D4; border-bottom: none } h1 { margin: 0; /* padding: 0.7em 0 0.3em 0; */ font-size: 1.5em; color: #11557C; } /* Header */ #header hr { display: none } #header h1 { margin: 1.5em 0 -1.5em; } #header img { border: none; margin: 0 0 -3em } #header :link, #header :visited, #header :link:hover, #header :visited:hover { background: transparent; color: #11557C; margin-bottom: 2px; border: none; font-size: 1.5em; } #header h1 :link:hover, #header h1 :visited:hover { color: #000 } dt :link:hover, dt :visited:hover { background-color: #BFD1D4; color: #000 } dt em { border-bottom: 1px dotted #bbb; color: #CA7900; font-style: normal; text-decoration: none; } .milestone .info h2 em { color: #CA7900; font-style: normal } table.progress td { background: #fff; padding: 0 } table.progress td.new { background: #f5f5b5 } table.progress td.closed { background: #11557C} table.progress td :hover { background: none } /* Main navigation bar */ #mainnav { border: 1px solid #000; font: normal 10px verdana,'Bitstream Vera Sans',helvetica,arial,sans-serif; margin: .66em 0 .33em; padding: .2em 0; } 
#mainnav li { border-right: none; padding: .25em 0 } #mainnav :link, #mainnav :visited { border-right: 1px solid #fff; border-bottom: none; border-left: 1px solid #555; color: #CA7900; /* #11557C; */ padding: .2em 20px; } * html #mainnav :link, * html #mainnav :visited { background-position: 1px 0 } #mainnav :link:hover, #mainnav :visited:hover { background-color: #ccc; border-right: 1px solid #ddd; } #mainnav .active :link, #mainnav .active :visited { border-top: none; border-right: 1px solid #000; border-left: 1px solid #000; color: #CA7900; font-weight: normal; } #mainnav .active :link:hover, #mainnav .active :visited:hover { border-right: 1px solid #000; background-color: white; } #metanav { background-color: white; } #main { background-color: white; } /*#content { padding-bottom: 2em; position: relative; background-color: white; } */ #altlinks { clear: both; text-align: center; background-color: white; } #footer { background-color: white; } #sitefooter { background-color: white; } #siteheader { background-color: white; } #header { background-color: white; } #banner { background-color: white; } #main { background-color: white; } networkx-1.8.1/doc/source/static/force/0000775000175000017500000000000012177457361017731 5ustar aricaric00000000000000networkx-1.8.1/doc/source/static/force/force.css0000664000175000017500000000015512177456333021540 0ustar aricaric00000000000000 circle.node { stroke: #fff; stroke-width: 1.5px; } line.link { stroke: #999; stroke-opacity: .6; } networkx-1.8.1/doc/source/templates/0000775000175000017500000000000012177457361017342 5ustar aricaric00000000000000networkx-1.8.1/doc/source/templates/indexsidebar.html0000664000175000017500000000220112177456333022662 0ustar aricaric00000000000000

Download

{% if version.split('.')[-1].startswith('dev') %}

This documentation is for version {{ version }}, which is not released yet.

{% else %}

Current version: {{ version }}

Get NetworkX from the Python Package Index, or install it with:

easy_install networkx
{% endif %}
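A quick way to confirm that the install worked is to import the package and check its version (an illustrative sketch; the version shown is the release this documentation accompanies):

>>> import networkx
>>> networkx.__version__
'1.8.1'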

Questions? Suggestions?

Join the Google group:

You can also open a ticket at the NetworkX Developer Zone.

networkx-1.8.1/doc/source/templates/layout.html0000664000175000017500000000154312177456333021546 0ustar aricaric00000000000000{% extends "!layout.html" %} {% block rootrellink %}
  • NetworkX Home
  • Documentation
  • Download
  • Developer (Github)
  • {% endblock %} {% block relbar1 %}
    NetworkX
    {{ super() }} {% if version.split('.')[-1].startswith('dev') %}

    This documentation is for the development version {{ version }}

    {% endif %} {% endblock %} {# put the sidebar before the body #} {% block sidebar1 %} {{ sidebar() }}{% endblock %} {% block sidebar2 %}{% endblock %} networkx-1.8.1/doc/source/templates/index.html0000664000175000017500000000771012177456333021342 0ustar aricaric00000000000000{% extends "layout.html" %} {% set title = 'Overview' %} {% set script_files = script_files + ["_static/force/d3js/d3.min.js"] %} {% set script_files = script_files + ["_static/force/d3js/d3.geom.min.js"] %} {% set script_files = script_files + ["_static/force/d3js/d3.layout.min.js"] %} {% set css_files = css_files + ["_static/force/force.css"] %} {% block body %}

    High productivity software for complex networks

    NetworkX is a Python language software package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks.

    Quick Example

    >>> import networkx as nx
    
    >>> G=nx.Graph()
    >>> G.add_node("spam")
    >>> G.add_edge(1,2)
    >>> print(G.nodes())
    [1, 2, 'spam']
    >>> print(G.edges())
    [(1, 2)]
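    The same session can be taken a step further to show one of the standard algorithms and a reporting method (an illustrative sketch; the outputs shown are what NetworkX 1.8 returns for this graph):

    >>> G.add_edge(2,3)
    >>> nx.shortest_path(G,1,3)
    [1, 2, 3]
    >>> G.degree(2)
    2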
    

    Documentation

    Features

    • Python language data structures for graphs, digraphs, and multigraphs.
    • Nodes can be "anything" (e.g. text, images, XML records)
    • Edges can hold arbitrary data (e.g. weights, time-series)
    • Generators for classic graphs, random graphs, and synthetic networks
    • Standard graph algorithms
    • Network structure and analysis measures
    • Basic graph drawing
    • Open source BSD license
    • Well tested: more than 1500 unit tests
    • Additional benefits from Python: fast prototyping, easy to teach, multi-platform

    {% endblock %} networkx-1.8.1/doc/source/reference/0000775000175000017500000000000012177457361017302 5ustar aricaric00000000000000networkx-1.8.1/doc/source/reference/api_1.8.rst0000664000175000017500000000111312177456333021165 0ustar aricaric00000000000000********************************* Version 1.8 notes and API changes ********************************* This page reflects API changes from networkx-1.7 to networkx-1.8. Please send comments and questions to the networkx-discuss mailing list: http://groups.google.com/group/networkx-discuss . * Laplacian functions now all return matrices. To get a numpy array from a matrix use L = nx.laplacian_matrix(G).A * is_directed_acyclic_graph() now returns false on undirected graphs (instead of raising exception) * cycles returned from simple_cycles() do not include repeated last node networkx-1.8.1/doc/source/reference/algorithms.component.rst0000664000175000017500000000306112177456333024204 0ustar aricaric00000000000000********** Components ********** .. automodule:: networkx.algorithms.components .. currentmodule:: networkx Connectivity ^^^^^^^^^^^^ .. automodule:: networkx.algorithms.components.connected .. autosummary:: :toctree: generated/ is_connected number_connected_components connected_components connected_component_subgraphs node_connected_component Strong connectivity ^^^^^^^^^^^^^^^^^^^ .. automodule:: networkx.algorithms.components.strongly_connected .. autosummary:: :toctree: generated/ is_strongly_connected number_strongly_connected_components strongly_connected_components strongly_connected_component_subgraphs strongly_connected_components_recursive kosaraju_strongly_connected_components condensation Weak connectivity ^^^^^^^^^^^^^^^^^ .. automodule:: networkx.algorithms.components.weakly_connected .. autosummary:: :toctree: generated/ is_weakly_connected number_weakly_connected_components weakly_connected_components weakly_connected_component_subgraphs Atrracting components ^^^^^^^^^^^^^^^^^^^^^ .. automodule:: networkx.algorithms.components.attracting .. autosummary:: :toctree: generated/ is_attracting_component number_attracting_components attracting_components attracting_component_subgraphs Biconnected components ^^^^^^^^^^^^^^^^^^^^^^ .. automodule:: networkx.algorithms.components.biconnected .. autosummary:: :toctree: generated/ is_biconnected biconnected_components biconnected_component_edges biconnected_component_subgraphs articulation_points networkx-1.8.1/doc/source/reference/news.rst0000664000175000017500000007400312177457176021020 0ustar aricaric00000000000000.. -*- coding: utf-8 -*- .. 
currentmodule:: networkx Release Log =========== Networkx-1.8.1 ------------ Release date: 4 August 2013 Bugfix release for missing files in source packaging Networkx-1.8 ------------ Release date: 28 July 2013 Highlights ~~~~~~~~~~ - Faster (linear-time) graphicality tests and Havel-Hakimi graph generators - Directed Laplacian matrix generator - Katz centrality algorithm - Functions to generate all simple paths - Improved shapefile reader - More flexible weighted projection of bipartite graphs - Faster topological sort, decendents and ancestors of DAGs - Scaling parameter for force-directed layout Bug Fixes --------- - Error with average weighted connectivity for digraphs, correct normalized laplacian with self-loops, load betweenness for single node graphs, isolated nodes missing from dfs/bfs trees, normalize HITS using l1, handle density of graphs with self loops - Cleaner handling of current figure status with Matplotlib, Pajek files now don't write troublesome header line, default alpha value for GEXF files, read curved edges from yEd GraphML For full details of the issues closed for this release (added features and bug fixes) see: https://github.com/networkx/networkx/issues?milestone=1&page=1&state=closed API Changes ~~~~~~~~~~~ See :doc:`api_1.8` Networkx-1.7 ------------ Release date: 4 July 2012 Highlights ~~~~~~~~~~ - New functions for k-clique community finding, flow hierarchy, union, disjoint union, compose, and intersection operators that work on lists of graphs, and creating the biadjacency matrix of a bipartite graph. - New approximation algorithms for dominating set, edge dominating set, independent set, max clique, and min-weighted vertex cover. - Many bug fixes and other improvements. For full details of the tickets closed for this release (added features and bug fixes) see: https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.7 API Changes ~~~~~~~~~~~ See :doc:`api_1.7` Networkx-1.6 ------------ Release date: 20 November 2011 Highlights ~~~~~~~~~~ New functions for finding articulation points, generating random bipartite graphs, constructing adjacency matrix representations, forming graph products, computing assortativity coefficients, measuring subgraph centrality and communicability, finding k-clique communities, and writing JSON format output. New examples for drawing with D3 Javascript library, and ordering matrices with the Cuthill-McKee algorithm. More memory efficient implementation of current-flow betweenness and new approximation algorithms for current-flow betweenness and shortest-path betweenness. Simplified handling of "weight" attributes for algorithms that use weights/costs/values. See :doc:`api_1.6`. Updated all code to work with the PyPy Python implementation http://pypy.org which produces faster performance on many algorithms. 
For full details of the tickets closed for this release (added features and bug fixes) see: https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.6 API Changes ~~~~~~~~~~~ See :doc:`api_1.6` Networkx-1.5 ------------ Release date: 4 June 2011 For full details of the tickets closed for this release see: https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.5 Highlights ~~~~~~~~~~ New features ~~~~~~~~~~~~ - Algorithms for :mod:`generating ` and :mod:`analyzing ` bipartite graphs - :mod:`Maximal independent set ` algorithm - :mod:`ErdÅ‘s-Gallai graphical degree sequence test ` - :mod:`Negative edge cycle test ` - More memory efficient :mod:`Dijkstra path length ` with cutoff parameter - :mod:`Weighted clustering coefficient ` - Read and write version 1.2 of :mod:`GEXF reader ` format - :mod:`Neighbor degree correlation ` that handle subsets of nodes - :mod:`In-place node relabeling ` - Many 'weighted' graph algorithms now take optional parameter to use specified edge attribute (default='weight') (:ticket:`509`) - Test for :mod:`distance regular ` graphs - Fast :mod:`directed ErdÅ‘s-Renyi graph ` generator - Fast :mod:`expected degree graph ` generator - :mod:`Navigable small world ` generator - :mod:`Waxman model ` generator - :mod:`Geographical threshold graph ` generator - :mod:`Karate Club, Florentine Families, and Davis' Women's Club ` graphs API Changes ~~~~~~~~~~~ See :doc:`api_1.5` Bug fixes ~~~~~~~~~ - Fix edge handling for multigraphs in networkx/graphviz interface (:ticket:`507`) - Update networkx/pydot interface for new versions of pydot (:ticket:`506`), (:ticket:`535`) - Fix negative cycle handling in Bellman-Ford (:ticket:`502`) - Write more attributes with GraphML and GML formats (:ticket:`480`) - Handle white space better in read_edgelist (:ticket:`513`) - Better parsing of Pajek format files (:ticket:`524`) (:ticket:`542`) - Isolates functions work with directed graphs (:ticket:`526`) - Faster conversion to numpy matrices (:ticket:`529`) - Add graph['name'] and use properties to access Graph.name (:ticket:`544`) - Topological sort confused None and 0 (:ticket:`546`) - GEXF writer mishandled weight=0 (:ticket:`550`) - Speedup in SciPy version of PageRank (:ticket:`554`) - Numpy PageRank node order incorrect + speedups (:ticket:`555`) Networkx-1.4 ------------ Release date: 23 January 2011 New features ~~~~~~~~~~~~ - :mod:`k-shell,k-crust,k-corona ` - :mod:`read GraphML files from yEd ` - :mod:`read/write GEXF format files ` - :mod:`find cycles in a directed graph ` - :mod:`DFS ` and :mod:`BFS ` algorithms - :mod:`chordal graph functions ` - :mod:`Prim's algorithm for minimum spanning tree ` - :mod:`r-ary tree generator ` - :mod:`rich club coefficient ` - NumPy matrix version of :mod:`Floyd's algorithm for all-pairs shortest path ` - :mod:`read GIS shapefiles ` - :mod:`functions to get and set node and edge attributes ` - and more, see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.4 API Changes ~~~~~~~~~~~ - :mod:`gnp_random_graph() ` now takes a directed=True|False keyword instead of create_using - :mod:`gnm_random_graph() ` now takes a directed=True|False keyword instead of create_using Bug fixes ~~~~~~~~~ - see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.4 Networkx-1.3 ------------ Release date: 28 August 2010 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - Works with Python versions 2.6, 2.7, 3.1, 
and 3.2 (but not 2.4 and 2.5). - :mod:`Minimum cost flow algorithms ` - :mod:`Bellman-Ford shortest paths ` - :mod:`GraphML reader and writer ` - :mod:`More exception/error types ` - Updated many tests to unittest style. Run with: "import networkx; networkx.test()" (requires nose testing package) - and more, see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.3 API Changes ~~~~~~~~~~~ - :mod:`minimum_spanning_tree() now returns a NetworkX Graph (a tree or forest) ` Bug fixes ~~~~~~~~~ - see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.3 Networkx-1.2 ------------ Release date: 28 July 2010 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - :mod:`Ford-Fulkerson max flow and min cut ` - :mod:`Closeness vitality ` - :mod:`Eulerian circuits ` - :mod:`Functions for isolates ` - :mod:`Simpler s_max generator ` - Compatible with IronPython-2.6 - Improved testing functionality: import networkx; networkx.test() tests entire package and skips tests with missing optional packages - All tests work with Python-2.4 - and more, see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.2 Networkx-1.1 ------------ Release date: 21 April 2010 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - :mod:`Algorithm for finding a basis for graph cycles ` - :mod:`Blockmodeling ` - :mod:`Assortativity and mixing matrices ` - :mod:`in-degree and out-degree centrality ` - :mod:`Attracting components ` and :mod:`condensation `. - :mod:`Weakly connected components ` - :mod:`Simpler interface to shortest path algorithms ` - :mod:`Edgelist format to read and write data with attributes ` - :mod:`Attribute matrices ` - :mod:`GML reader for nested attributes ` - Current-flow (random walk) :mod:`betweenness ` and :mod:`closeness `. - :mod:`Directed configuration model `, and :mod:`directed random graph model `. - Improved documentation of drawing, shortest paths, and other algorithms - Many more tests, can be run with "import networkx; networkx.test()" - and much more, see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.1 API Changes ~~~~~~~~~~~ Returning dictionaries ********************** Several of the algorithms and the degree() method now return dictionaries keyed by node instead of lists. In some cases there was a with_labels keyword which is no longer necessary. For example, >>> G=nx.Graph() >>> G.add_edge('a','b') >>> G.degree() # returns dictionary of degree keyed by node {'a': 1, 'b': 1} Asking for the degree of a single node still returns a single number >>> G.degree('a') 1 The following now return dictionaries by default (instead of lists) and the with_labels keyword has been removed: - :meth:`Graph.degree`, :meth:`MultiGraph.degree`, :meth:`DiGraph.degree`, :meth:`DiGraph.in_degree`, :meth:`DiGraph.out_degree`, :meth:`MultiDiGraph.degree`, :meth:`MultiDiGraph.in_degree`, :meth:`MultiDiGraph.out_degree`. 
- :func:`clustering`, :func:`triangles` - :func:`node_clique_number`, :func:`number_of_cliques`, :func:`cliques_containing_node` - :func:`eccentricity` The following now return dictionaries by default (instead of lists) - :func:`pagerank` - :func:`hits` Adding nodes ************ add_nodes_from now accepts (node,attrdict) two-tuples >>> G=nx.Graph() >>> G.add_nodes_from([(1,{'color':'red'})]) Examples ~~~~~~~~ - `Mayvi2 drawing `_ - `Blockmodel `_ - `Sampson's monastery `_ - `Ego graph `_ Bug fixes ~~~~~~~~~ - Support graph attributes with union, intersection, and other graph operations - Improve subgraph speed (and related algorithms such as connected_components_subgraphs()) - Handle multigraphs in more operators (e.g. union) - Handle double-quoted labels with pydot - Normalize betweenness_centrality for undirected graphs correctly - Normalize eigenvector_centrality by l2 norm - :func:`read_gml` now returns multigraphs Networkx-1.0.1 -------------- Release date: 11 Jan 2010 See: https://networkx.lanl.gov/trac/timeline Bug fix release for missing setup.py in manifest. Networkx-1.0 ------------ Release date: 8 Jan 2010 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ This release has significant changes to parts of the graph API to allow graph, node, and edge attributes. See http://networkx.lanl.gov//reference/api_changes.html - Update Graph, DiGraph, and MultiGraph classes to allow attributes. - Default edge data is now an empty dictionary (was the integer 1) - Difference and intersection operators - Average shortest path - A* (A-Star) algorithm - PageRank, HITS, and eigenvector centrality - Read Pajek files - Line graphs - Minimum spanning tree (Kruskal's algorithm) - Dense and sparse Fruchterman-Reingold layout - Random clustered graph generator - Directed scale-free graph generator - Faster random regular graph generator - Improved edge color and label drawing with Matplotlib - and much more, see https://networkx.lanl.gov/trac/query?status=closed&group=milestone&milestone=networkx-1.0 Examples ~~~~~~~~ - Update to work with networkx-1.0 API - Graph subclass example Networkx-0.99 ------------- Release date: 18 November 2008 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ This release has significant changes to parts of the graph API. See http://networkx.lanl.gov//reference/api_changes.html - Update Graph and DiGraph classes to use weighted graphs as default Change in API for performance and code simplicity. - New MultiGraph and MultiDiGraph classes (replace XGraph and XDiGraph) - Update to use Sphinx documentation system http://networkx.lanl.gov/ - Developer site at https://networkx.lanl.gov/trac/ - Experimental LabeledGraph and LabeledDiGraph - Moved package and file layout to subdirectories. Bug fixes ~~~~~~~~~ - handle root= option to draw_graphviz correctly Examples ~~~~~~~~ - Update to work with networkx-0.99 API - Drawing examples now use matplotlib.pyplot interface - Improved drawings in many examples - New examples - see http://networkx.lanl.gov/examples/ NetworkX-0.37 --------------- Release date: 17 August 2008 See: https://networkx.lanl.gov/trac/timeline NetworkX now requires Python 2.4 or later for full functionality. 
New features ~~~~~~~~~~~~ - Edge coloring and node line widths with Matplotlib drawings - Update pydot functions to work with pydot-1.0.2 - Maximum-weight matching algorithm - Ubigraph interface for 3D OpenGL layout and drawing - Pajek graph file format reader and writer - p2g graph file format reader and writer - Secondary sort in topological sort Bug fixes ~~~~~~~~~ - Better edge data handling with GML writer - Edge betweenness fix for XGraph with default data of None - Handle Matplotlib version strings (allow "pre") - Interface to PyGraphviz (to_agraph()) now handles parallel edges - Fix bug in copy from XGraph to XGraph with multiedges - Use SciPy sparse lil matrix format instead of coo format - Clear up ambiguous cases for Barabasi-Albert model - Better care of color maps with Matplotlib when drawing colored nodes and edges - Fix error handling in layout.py Examples ~~~~~~~~ - Ubigraph examples showing 3D drawing NetworkX-0.36 --------------- Release date: 13 January 2008 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - GML format graph reader, tests, and example (football.py) - edge_betweenness() and load_betweenness() Bug fixes ~~~~~~~~~ - remove obsolete parts of pygraphviz interface - improve handling of Matplotlib version strings - write_dot() now writes parallel edges and self loops - is_bipartite() and bipartite_color() fixes - configuration model speedup using random.shuffle() - convert with specified nodelist now works correctly - vf2 isomorphism checker updates NetworkX-0.35.1 --------------- Release date: 27 July 2007 See: https://networkx.lanl.gov/trac/timeline Small update to fix import readwrite problem and maintain Python2.3 compatibility. NetworkX-0.35 ------------- Release date: 22 July 2007 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - algorithms for strongly connected components. - Brandes betweenness centrality algorithm (weighted and unweighted versions) - closeness centrality for weighted graphs - dfs_preorder, dfs_postorder, dfs_tree, dfs_successor, dfs_predecessor - readers for GraphML, LEDA, sparse6, and graph6 formats. 
- allow arguments in graphviz_layout to be passed directly to graphviz Bug fixes ~~~~~~~~~ - more detailed installation instructions - replaced dfs_preorder,dfs_postorder (see search.py) - allow initial node positions in spectral_layout - report no error on attempting to draw empty graph - report errors correctly when using tuples as nodes #114 - handle conversions from incomplete dict-of-dict data NetworkX-0.34 ------------- Release date: 12 April 2007 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - benchmarks for graph classes - Brandes betweenness centrality algorithm - Dijkstra predecessor and distance algorithm - xslt to convert DIA graphs to NetworkX - number_of_edges(u,v) counts edges between nodes u and v - run tests with python setup_egg.py test (needs setuptools) else use python -c "import networkx; networkx.test()" - is_isomorphic() that uses vf2 algorithm Bug fixes ~~~~~~~~~ - speedups of neighbors() - simplified Dijkstra's algorithm code - better exception handling for shortest paths - get_edge(u,v) returns None (instead of exception) if no edge u-v - floyd_warshall_array fixes for negative weights - bad G467, docs, and unittest fixes for graph atlas - don't put nans in numpy or scipy sparse adjacency matrix - handle get_edge() exception (return None if no edge) - remove extra kwds arguments in many places - no multi counting edges in conversion to dict of lists for multigraphs - allow passing tuple to get_edge() - bad parameter order in node/edge betweenness - edge betweenness doesn't fail with XGraph - don't throw exceptions for nodes not in graph (silently ignore instead) in edges_* and degree_* NetworkX-0.33 ------------- Release date: 27 November 2006 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - draw edges with specified colormap - more efficient version of Floyd's algorithm for all pairs shortest path - use numpy only, Numeric is deprecated - include tests in source package (networkx/tests) - include documentation in source package (doc) - tests can now be run with >>> import networkx >>> networkx.test() Bug fixes ~~~~~~~~~ - read_gpickle now works correctly with Windows - refactored large modules into smaller code files - degree(nbunch) now returns degrees in same order as nbunch - degree() now works for multiedges=True - update node_boundary and edge_boundary for efficiency - edited documentation for graph classes, now mostly in info.py Examples ~~~~~~~~ - Draw edges with colormap NetworkX-0.32 ------------- Release date: 29 September 2006 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - Update to work with numpy-1.0x - Make egg usage optional: use python setup_egg.py bdist_egg to build egg - Generators and functions for bipartite graphs - Experimental classes for trees and forests - Support for new pygraphviz update (in nx_agraph.py) , see http://networkx.lanl.gov/pygraphviz/ for pygraphviz details Bug fixes ~~~~~~~~~ - Handle special cases correctly in triangles function - Typos in documentation - Handle special cases in shortest_path and shortest_path_length, allow cutoff parameter for maximum depth to search - Update examples: erdos_renyi.py, miles.py, roget,py, eigenvalues.py Examples ~~~~~~~~ - Expected degree sequence - New pygraphviz interface NetworkX-0.31 ------------- Release date: 20 July 2006 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - arbitrary node relabeling (use relabel_nodes) - conversion of NetworkX graphs to/from Python dict/list types, numpy 
matrix or array types, and scipy_sparse_matrix types - generator for random graphs with given expected degree sequence Bug fixes ~~~~~~~~~ - Allow drawing graphs with no edges using pylab - Use faster heapq in dijkstra - Don't complain if X windows is not available Examples ~~~~~~~~ - update drawing examples NetworkX-0.30 ------------- Release date: 23 June 2006 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - update to work with Python 2.5 - bidirectional version of shortest_path and Dijkstra - single_source_shortest_path and all_pairs_shortest_path - s-metric and experimental code to generate maximal s-metric graph - double_edge_swap and connected_double_edge_swap - Floyd's algorithm for all pairs shortest path - read and write unicode graph data to text files - read and write YAML format text files, http://yaml.org Bug fixes ~~~~~~~~~ - speed improvements (faster version of subgraph, is_connected) - added cumulative distribution and modified discrete distribution utilities - report error if DiGraphs are sent to connected_components routines - removed with_labels keywords for many functions where it was causing confusion - function name changes in shortest_path routines - saner internal handling of nbunch (node bunches), raise an exception if an nbunch isn't a node or iterable - better keyword handling in io.py allows reading multiple graphs - don't mix Numeric and numpy arrays in graph layouts and drawing - avoid automatically rescaling matplotlib axes when redrawing graph layout Examples ~~~~~~~~ - unicode node labels NetworkX-0.29 ------------- Release date: 28 April 2006 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - Algorithms for betweenness, eigenvalues, eigenvectors, and spectral projection for threshold graphs - Use numpy when available - dense_gnm_random_graph generator - Generators for some directed graphs: GN, GNR, and GNC by Krapivsky and Redner - Grid graph generators now label by index tuples. Helper functions for manipulating labels. - relabel_nodes_with_function Bug fixes ~~~~~~~~~ - Betweenness centrality now correctly uses Brandes definition and has normalization option outside main loop - Empty graph now labeled as empty_graph(n) - shortest_path_length used python2.4 generator feature - degree_sequence_tree off by one error caused nonconsecutive labeling - periodic_grid_2d_graph removed in favor of grid_2d_graph with periodic=True NetworkX-0.28 ------------- Release date: 13 March 2006 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - Option to construct Laplacian with rows and columns in specified order - Option in convert_node_labels_to_integers to use sorted order - predecessor(G,n) function that returns dictionary of nodes with predecessors from breadth-first search of G starting at node n. https://networkx.lanl.gov/trac/ticket/26 Examples ~~~~~~~~ - Formation of giant component in binomial_graph: - Chess masters matches: - Gallery https://networkx.lanl.gov/gallery.html Bug fixes ~~~~~~~~~ - Adjusted names for random graphs. 
+ erdos_renyi_graph=binomial_graph=gnp_graph: n nodes with edge probability p + gnm_graph: n nodes and m edges + fast_gnp_random_graph: gnp for sparse graphs (small p) - Documentation contains correct spelling of Barabási, Bollobás, ErdÅ‘s, and Rényi in UTF-8 encoding - Increased speed of connected_components and related functions by using faster BFS algorithm in networkx.paths https://networkx.lanl.gov/trac/ticket/27 - XGraph and XDiGraph with multiedges=True produced error on delete_edge - Cleaned up docstring errors - Normalize names of some graphs to produce strings that represent calling sequence NetworkX-0.27 ------------- Release date: 5 February 2006 See: https://networkx.lanl.gov/trac/timeline New features ~~~~~~~~~~~~ - sparse_binomial_graph: faster graph generator for sparse random graphs - read/write routines in io.py now handle XGraph() type and gzip and bzip2 files - optional mapping of type for read/write routine to allow on-the-fly conversion of node and edge datatype on read - Substantial changes related to digraphs and definitions of neighbors() and edges(). For digraphs edges=out_edges. Neighbors now returns a list of neighboring nodes with possible duplicates for graphs with parallel edges See https://networkx.lanl.gov/trac/ticket/24 - Addition of out_edges, in_edges and corresponding out_neighbors and in_neighbors for digraphs. For digraphs edges=out_edges. Examples ~~~~~~~~ - Minard's data for Napoleon's Russian campaign Bug fixes ~~~~~~~~~ - XGraph(multiedges=True) returns a copy of the list of edges for get_edge() NetworkX-0.26 ------------- Release date: 6 January 2006 New features ~~~~~~~~~~~~ - Simpler interface to drawing with pylab - G.info(node=None) function returns short information about graph or node - adj_matrix now takes optional nodelist to force ordering of rows/columns in matrix - optional pygraphviz and pydot interface to graphviz is now callable as "graphviz" with pygraphviz preferred. Use draw_graphviz(G). 
Examples ~~~~~~~~ - Several new examples showing how draw to graphs with various properties of nodes, edges, and labels Bug fixes ~~~~~~~~~ - Default data type for all graphs is now None (was the integer 1) - add_nodes_from now won't delete edges if nodes added already exist - Added missing names to generated graphs - Indexes for nodes in graphs start at zero by default (was 1) NetworkX-0.25 ------------- Release date: 5 December 2005 New features ~~~~~~~~~~~~ - Uses setuptools for installation http://peak.telecommunity.com/DevCenter/setuptools - Improved testing infrastructure, can now run python setup.py test - Added interface to draw graphs with pygraphviz https://networkx.lanl.gov/pygraphviz/ - is_directed() function call Examples ~~~~~~~~ - Email example shows how to use XDiGraph with Python objects as edge data Documentation ~~~~~~~~~~~~~ - Reformat menu, minor changes to Readme, better stylesheet Bug fixes ~~~~~~~~~ - use create_using= instead of result= keywords for graph types in all cases - missing weights for degree 0 and 1 nodes in clustering - configuration model now uses XGraph, returns graph with identical degree sequence as input sequence - fixed Dijkstra priority queue - fixed non-recursive toposort and is_directed_acyclic graph NetworkX-0.24 ------------- Release date: 20 August 2005 Bug fixes ~~~~~~~~~ - Update of Dijkstra algorithm code - dfs_successor now calls proper search method - Changed to list comprehension in DiGraph.reverse() for python2.3 compatibility - Barabasi-Albert graph generator fixed - Attempt to add self loop should add node even if parallel edges not allowed NetworkX-0.23 ------------- Release date: 14 July 2005 The NetworkX web locations have changed: http://networkx.lanl.gov/ - main documentation site http://networkx.lanl.gov/svn/ - subversion source code repository https://networkx.lanl.gov/trac/ - bug tracking and info Important Change ~~~~~~~~~~~~~~~~ The naming conventions in NetworkX have changed. The package name "NX" is now "networkx". The suggested ways to import the NetworkX package are - import networkx - import networkx as NX - from networkx import * New features ~~~~~~~~~~~~ - DiGraph reverse - Graph generators + watts_strogatz_graph now does rewiring method + old watts_strogatz_graph->newman_watts_strogatz_graph Examples ~~~~~~~~ Documentation ~~~~~~~~~~~~~ - Changed to reflect NX-networkx change - main site is now https://networkx.lanl.gov/ Bug fixes ~~~~~~~~~ - Fixed logic in io.py for reading DiGraphs. - Path based centrality measures (betweenness, closeness) modified so they work on graphs that are not connected and produce the same result as if each connected component were considered separately. NetworkX-0.22 ------------- Release date: 17 June 2005 New features ~~~~~~~~~~~~ - Topological sort, testing for directed acyclic graphs (DAGs) - Dijkstra's algorithm for shortest paths in weighted graphs - Multidimensional layout with dim=n for drawing - 3d rendering demonstration with vtk - Graph generators + random_powerlaw_tree + dorogovtsev_goltsev_mendes_graph Examples ~~~~~~~~ - Kevin Bacon movie actor graph: Examples/kevin_bacon.py - Compute eigenvalues of graph Laplacian: Examples/eigenvalues.py - Atlas of small graphs: Examples/atlas.py Documentation ~~~~~~~~~~~~~ - Rewrite of setup scripts to install documentation and tests in documentation directory specified Bug fixes ~~~~~~~~~ - Handle calls to edges() with non-node, non-iterable items. 
- truncated_tetrahedral_graph was just plain wrong - Speedup of betweenness_centrality code - bfs_path_length now returns correct lengths - Catch error if target of search not in connected component of source - Code cleanup to label internal functions with _name - Changed import statement lines to always use "import NX" to protect name-spaces - Other minor bug-fixes and testing added networkx-1.8.1/doc/source/reference/algorithms.swap.rst0000664000175000017500000000022612177456333023154 0ustar aricaric00000000000000**** Swap **** .. automodule:: networkx.algorithms.swap .. autosummary:: :toctree: generated/ double_edge_swap connected_double_edge_swap networkx-1.8.1/doc/source/reference/readwrite.sparsegraph6.rst0000664000175000017500000000034012177456333024421 0ustar aricaric00000000000000SparseGraph6 ============ .. automodule:: networkx.readwrite.sparsegraph6 .. autosummary:: :toctree: generated/ read_graph6 parse_graph6 read_graph6_list read_sparse6 parse_sparse6 read_sparse6_list networkx-1.8.1/doc/source/reference/api_1.6.rst0000664000175000017500000000650212177456333021172 0ustar aricaric00000000000000********************************* Version 1.6 notes and API changes ********************************* This page reflects API changes from networkx-1.5 to networkx-1.6. Please send comments and questions to the networkx-discuss mailing list: http://groups.google.com/group/networkx-discuss . Graph Classes ------------- The degree* methods in the graph classes (Graph, DiGraph, MultiGraph, MultiDiGraph) now take an optional weight= keyword that allows computing weighted degree with arbitrary (numerical) edge attributes. Setting weight=None is equivalent to the previous weighted=False. Weighted graph algorithms ------------------------- Many 'weighted' graph algorithms now take optional parameter to specifiy which edge attribute should be used for the weight (default='weight') (:ticket:`573`) In some cases the parameter name was changed from weighted, to weight. Here is how to specify which edge attribute will be used in the algorithms: - Use weight=None to consider all weights equally (unweighted case) - Use weight='weight' to use the 'weight' edge atribute - Use weight='other' to use the 'other' edge attribute Algorithms affected are: to_scipy_sparse_matrix, clustering, average_clustering, bipartite.degree, spectral_layout, neighbor_degree, is_isomorphic, betweenness_centrality, betweenness_centrality_subset, vitality, load_centrality, mincost, shortest_path, shortest_path_length, average_shortest_path_length Isomorphisms ------------ Node and edge attributes are now more easily incorporated into isomorphism checks via the 'node_match' and 'edge_match' parameters. As part of this change, the following classes were removed:: WeightedGraphMatcher WeightedDiGraphMatcher WeightedMultiGraphMatcher WeightedMultiDiGraphMatcher The function signature for 'is_isomorphic' is now simply:: is_isomorphic(g1, g2, node_match=None, edge_match=None) See its docstring for more details. To aid in the creation of 'node_match' and 'edge_match' functions, users are encouraged to work with:: categorical_node_match categorical_edge_match categroical_multiedge_match numerical_node_match numerical_edge_match numerical_multiedge_match generic_node_match generic_edge_match generic_multiedge_match These functions construct functions which can be passed to 'is_isomorphic'. Finally, note that the above functions are not imported into the top-level namespace and should be accessed from 'networkx.algorithms.isomorphism'. 
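For instance, here is a minimal sketch (the graphs, node labels, and 'weight' values below are purely illustrative) of building an edge_match function with one of these helpers and handing it to 'is_isomorphic', using the shorthand import spelled out just below::

    >>> import networkx as nx
    >>> import networkx.algorithms.isomorphism as iso
    >>> G1 = nx.Graph()
    >>> G1.add_edge('a', 'b', weight=1.0)
    >>> G1.add_edge('b', 'c', weight=2.0)
    >>> G2 = nx.Graph()
    >>> G2.add_edge(1, 2, weight=1.0)
    >>> G2.add_edge(2, 3, weight=2.0)
    >>> em = iso.numerical_edge_match('weight', 1.0)  # 1.0 is used when 'weight' is missing
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    True
    >>> G2[2][3]['weight'] = 5.0  # same topology, but the weights no longer match
    >>> nx.is_isomorphic(G1, G2, edge_match=em)
    False

Any of the other helpers, or a hand-written function that takes two attribute dictionaries and returns True or False, can be substituted for numerical_edge_match in the same way.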
A useful import statement that will be repeated throughout documentation is:: import networkx.algorithms.isomorphism as iso Other ----- * attracting_components A list of lists is returned instead of a list of tuples. * condensation The condensation algorithm now takes a second argument (scc) and returns a graph with nodes labeled as integers instead of node tuples. * degree connectivity average_in_degree_connectivity and average_out_degree_connectivity have been replaced with average_degree_connectivity(G, source='in', target='in') and average_degree_connectivity(G, source='out', target='out') * neighbor degree average_neighbor_in_degree and average_neighbor_out_degree have been replaced with average_neighbor_degree(G, source='in', target='in') and average_neighbor_degree(G, source='out', target='out') networkx-1.8.1/doc/source/reference/relabel.rst0000664000175000017500000000035012177456333021436 0ustar aricaric00000000000000**************** Relabeling nodes **************** .. currentmodule:: networkx Relabeling ---------- .. automodule:: networkx.relabel .. autosummary:: :toctree: generated/ convert_node_labels_to_integers relabel_nodes networkx-1.8.1/doc/source/reference/api_1.7.rst0000664000175000017500000000055112177456333021171 0ustar aricaric00000000000000********************************* Version 1.7 notes and API changes ********************************* This page reflects API changes from networkx-1.6 to networkx-1.7. Please send comments and questions to the networkx-discuss mailing list: http://groups.google.com/group/networkx-discuss . Other ----- * Untested bipartite_random_regular_graph() removed. networkx-1.8.1/doc/source/reference/readwrite.gexf.rst0000664000175000017500000000021612177456333022747 0ustar aricaric00000000000000GEXF ==== .. automodule:: networkx.readwrite.gexf .. autosummary:: :toctree: generated/ read_gexf write_gexf relabel_gexf_graph networkx-1.8.1/doc/source/reference/algorithms.graphical.rst0000664000175000017500000000051012177456333024130 0ustar aricaric00000000000000************************* Graphical degree sequence ************************* .. automodule:: networkx.algorithms.graphical .. autosummary:: :toctree: generated/ is_graphical is_digraphical is_multigraphical is_pseudographical is_valid_degree_sequence_havel_hakimi is_valid_degree_sequence_erdos_gallai networkx-1.8.1/doc/source/reference/algorithms.boundary.rst0000664000175000017500000000022612177456333024025 0ustar aricaric00000000000000******** Boundary ******** .. automodule:: networkx.algorithms.boundary .. autosummary:: :toctree: generated/ edge_boundary node_boundary networkx-1.8.1/doc/source/reference/algorithms.distance_measures.rst0000664000175000017500000000032612177456333025701 0ustar aricaric00000000000000***************** Distance Measures ***************** .. automodule:: networkx.algorithms.distance_measures .. autosummary:: :toctree: generated/ center diameter eccentricity periphery radius networkx-1.8.1/doc/source/reference/glossary.rst0000664000175000017500000000376012177456333021703 0ustar aricaric00000000000000.. _glossary: Glossary ======== .. glossary:: :sorted: edge Edges are either two-tuples of nodes (u,v) or three-tuples of nodes with an edge attribute dictionary (u,v,dict). ebunch An iterable container of edge tuples like a list, iterator, or file. edge attribute Edges can have arbitrary Python objects assigned as attributes by using keyword/value pairs when adding an edge or by assigning to the G.edge[u][v] attribute dictionary for the specified edge u-v.
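For example (a minimal illustration; the node labels and attribute names are arbitrary)::

    >>> import networkx as nx
    >>> G = nx.Graph()
    >>> G.add_edge(1, 2, weight=4.7)       # keyword/value pair when adding the edge
    >>> G.edge[1][2]['color'] = 'blue'     # direct assignment to the attribute dictionary
    >>> G[1][2]['weight']
    4.7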
hashable An object is hashable if it has a hash value which never changes during its lifetime (it needs a __hash__() method), and can be compared to other objects (it needs an __eq__() or __cmp__() method). Hashable objects which compare equal must have the same hash value. Hashability makes an object usable as a dictionary key and a set member, because these data structures use the hash value internally. All of Python's immutable built-in objects are hashable, while no mutable containers (such as lists or dictionaries) are. Objects which are instances of user-defined classes are hashable by default; they all compare unequal, and their hash value is their id(). Definition from http://docs.python.org/glossary.html nbunch An nbunch is any iterable container of nodes that is not itself a node in the graph. It can be an iterable or an iterator, e.g. a list, set, graph, file, etc.. node attribute Nodes can have arbitrary Python objects assigned as attributes by using keyword/value pairs when adding a node or assigning to the G.node[n] attribute dictionary for the specified node n. node A node can be any hashable Python object except None. dictionary A Python dictionary maps keys to values. Also known as "hashes", or "associative arrays". See http://docs.python.org/tutorial/datastructures.html#dictionaries networkx-1.8.1/doc/source/reference/algorithms.dag.rst0000664000175000017500000000042612177456333022737 0ustar aricaric00000000000000*********************** Directed Acyclic Graphs *********************** .. automodule:: networkx.algorithms.dag .. autosummary:: :toctree: generated/ ancestors descendants topological_sort topological_sort_recursive is_directed_acyclic_graph is_aperiodic networkx-1.8.1/doc/source/reference/functions.rst0000664000175000017500000000132312177456333022041 0ustar aricaric00000000000000********* Functions ********* .. automodule:: networkx.classes.function Graph ----- .. autosummary:: :toctree: generated/ degree degree_histogram density info create_empty_copy is_directed Nodes ----- .. autosummary:: :toctree: generated/ nodes number_of_nodes nodes_iter all_neighbors non_neighbors Edges ----- .. autosummary:: :toctree: generated/ edges number_of_edges edges_iter Attributes ---------- .. autosummary:: :toctree: generated/ set_node_attributes get_node_attributes set_edge_attributes get_edge_attributes Freezing graph structure ------------------------ .. autosummary:: :toctree: generated/ freeze is_frozen networkx-1.8.1/doc/source/reference/algorithms.flow.rst0000664000175000017500000000063212177456333023152 0ustar aricaric00000000000000***** Flows ***** .. automodule:: networkx.algorithms.flow Ford-Fulkerson -------------- .. autosummary:: :toctree: generated/ max_flow min_cut ford_fulkerson ford_fulkerson_flow ford_fulkerson_flow_and_auxiliary Network Simplex --------------- .. autosummary:: :toctree: generated/ network_simplex min_cost_flow_cost min_cost_flow cost_of_flow max_flow_min_cost networkx-1.8.1/doc/source/reference/api_changes.rst0000664000175000017500000000022112177456333022266 0ustar aricaric00000000000000*********** API changes *********** .. toctree:: :maxdepth: 2 api_1.8 api_1.7 api_1.6 api_1.5 api_1.4 api_1.0 api_0.99 networkx-1.8.1/doc/source/reference/linalg.rst0000664000175000017500000000134512177456333021303 0ustar aricaric00000000000000.. _linalg: Linear algebra ************** .. currentmodule:: networkx Graph Matrix ------------ .. automodule:: networkx.linalg.graphmatrix .. 
autosummary:: :toctree: generated/ adjacency_matrix incidence_matrix Laplacian Matrix ---------------- .. automodule:: networkx.linalg.laplacianmatrix .. autosummary:: :toctree: generated/ laplacian_matrix normalized_laplacian_matrix directed_laplacian_matrix Spectrum --------- .. automodule:: networkx.linalg.spectrum .. autosummary:: :toctree: generated/ laplacian_spectrum adjacency_spectrum Attribute Matrices ------------------ .. automodule:: networkx.linalg.attrmatrix .. autosummary:: :toctree: generated/ attr_matrix attr_sparse_matrix networkx-1.8.1/doc/source/reference/algorithms.distance_regular.rst0000664000175000017500000000035612177456333025521 0ustar aricaric00000000000000*********************** Distance-Regular Graphs *********************** .. automodule:: networkx.algorithms.distance_regular .. autosummary:: :toctree: generated/ is_distance_regular intersection_array global_parameters networkx-1.8.1/doc/source/reference/algorithms.mis.rst0000664000175000017500000000026712177456333022777 0ustar aricaric00000000000000*********************** Maximal independent set *********************** .. automodule:: networkx.algorithms.mis .. autosummary:: :toctree: generated/ maximal_independent_set networkx-1.8.1/doc/source/reference/readwrite.pajek.rst0000664000175000017500000000021612177456333023110 0ustar aricaric00000000000000Pajek ===== .. automodule:: networkx.readwrite.pajek .. autosummary:: :toctree: generated/ read_pajek write_pajek parse_pajek networkx-1.8.1/doc/source/reference/readwrite.nx_shp.rst0000664000175000017500000000021312177456333023312 0ustar aricaric00000000000000GIS Shapefile ============= .. automodule:: networkx.readwrite.nx_shp .. autosummary:: :toctree: generated/ read_shp write_shp networkx-1.8.1/doc/source/reference/algorithms.link_analysis.rst0000664000175000017500000000064012177456333025042 0ustar aricaric00000000000000************* Link Analysis ************* PageRank -------- .. automodule:: networkx.algorithms.link_analysis.pagerank_alg .. autosummary:: :toctree: generated/ pagerank pagerank_numpy pagerank_scipy google_matrix Hits ---- .. automodule:: networkx.algorithms.link_analysis.hits_alg .. autosummary:: :toctree: generated/ hits hits_numpy hits_scipy hub_matrix authority_matrix networkx-1.8.1/doc/source/reference/algorithms.shortest_paths.rst0000664000175000017500000000243612177456333025261 0ustar aricaric00000000000000Shortest Paths ============== .. automodule:: networkx.algorithms.shortest_paths.generic .. autosummary:: :toctree: generated/ shortest_path all_shortest_paths shortest_path_length average_shortest_path_length has_path Advanced Interface ------------------ .. automodule:: networkx.algorithms.shortest_paths.unweighted .. autosummary:: :toctree: generated/ single_source_shortest_path single_source_shortest_path_length all_pairs_shortest_path all_pairs_shortest_path_length predecessor .. automodule:: networkx.algorithms.shortest_paths.weighted .. autosummary:: :toctree: generated/ dijkstra_path dijkstra_path_length single_source_dijkstra_path single_source_dijkstra_path_length all_pairs_dijkstra_path all_pairs_dijkstra_path_length single_source_dijkstra bidirectional_dijkstra dijkstra_predecessor_and_distance bellman_ford negative_edge_cycle Dense Graphs ------------ .. automodule:: networkx.algorithms.shortest_paths.dense .. autosummary:: :toctree: generated/ floyd_warshall floyd_warshall_predecessor_and_distance floyd_warshall_numpy A* Algorithm ------------ .. automodule:: networkx.algorithms.shortest_paths.astar .. 
autosummary:: :toctree: generated/ astar_path astar_path_length networkx-1.8.1/doc/source/reference/algorithms.rst0000664000175000017500000000162612177456333022210 0ustar aricaric00000000000000.. _algorithms: ********** Algorithms ********** .. currentmodule:: networkx .. toctree:: :maxdepth: 2 algorithms.approximation algorithms.assortativity algorithms.bipartite algorithms.block algorithms.boundary algorithms.centrality algorithms.chordal algorithms.clique algorithms.clustering algorithms.community algorithms.component algorithms.connectivity algorithms.core algorithms.cycles algorithms.dag algorithms.distance_measures algorithms.distance_regular algorithms.euler algorithms.flow algorithms.graphical algorithms.hierarchy algorithms.isolates algorithms.isomorphism algorithms.link_analysis algorithms.matching algorithms.mis algorithms.mst algorithms.operators algorithms.rich_club algorithms.shortest_paths algorithms.simple_paths algorithms.swap algorithms.traversal algorithms.vitality networkx-1.8.1/doc/source/reference/algorithms.clique.rst0000664000175000017500000000046412177456333023470 0ustar aricaric00000000000000****** Clique ****** .. automodule:: networkx.algorithms.clique .. autosummary:: :toctree: generated/ find_cliques make_max_clique_graph make_clique_bipartite graph_clique_number graph_number_of_cliques node_clique_number number_of_cliques cliques_containing_node networkx-1.8.1/doc/source/reference/algorithms.approximation.rst0000664000175000017500000000202112177456333025067 0ustar aricaric00000000000000************* Approximation ************* .. automodule:: networkx.algorithms.approximation Clique ------ .. automodule:: networkx.algorithms.approximation.clique .. autosummary:: :toctree: generated/ max_clique clique_removal Dominating Set --------------- .. automodule:: networkx.algorithms.approximation.dominating_set .. autosummary:: :toctree: generated/ min_weighted_dominating_set min_edge_dominating_set Independent Set --------------- .. automodule:: networkx.algorithms.approximation.independent_set .. autosummary:: :toctree: generated/ maximum_independent_set Matching -------- .. automodule:: networkx.algorithms.approximation.matching .. autosummary:: :toctree: generated/ min_maximal_matching Ramsey ------ .. automodule:: networkx.algorithms.approximation.ramsey .. autosummary:: :toctree: generated/ ramsey_R2 Vertex Cover ------------ .. automodule:: networkx.algorithms.approximation.vertex_cover .. autosummary:: :toctree: generated/ min_weighted_vertex_cover networkx-1.8.1/doc/source/reference/algorithms.bipartite.rst0000664000175000017500000000240012177456333024161 0ustar aricaric00000000000000********* Bipartite ********* .. automodule:: networkx.algorithms.bipartite Basic functions --------------- .. automodule:: networkx.algorithms.bipartite.basic .. autosummary:: :toctree: generated/ is_bipartite is_bipartite_node_set sets color density degrees biadjacency_matrix Projections ----------- .. automodule:: networkx.algorithms.bipartite.projection .. autosummary:: :toctree: generated/ projected_graph weighted_projected_graph collaboration_weighted_projected_graph overlap_weighted_projected_graph generic_weighted_projected_graph Spectral -------- .. automodule:: networkx.algorithms.bipartite.spectral .. autosummary:: :toctree: generated/ spectral_bipartivity Clustering ---------- .. automodule:: networkx.algorithms.bipartite.cluster .. autosummary:: :toctree: generated/ clustering average_clustering latapy_clustering robins_alexander_clustering Redundancy ---------- .. 
automodule:: networkx.algorithms.bipartite.redundancy .. autosummary:: :toctree: generated/ node_redundancy Centrality ---------- .. automodule:: networkx.algorithms.bipartite.centrality .. autosummary:: :toctree: generated/ closeness_centrality degree_centrality betweenness_centrality networkx-1.8.1/doc/source/reference/readwrite.yaml.rst0000664000175000017500000000017412177456333022763 0ustar aricaric00000000000000YAML ==== .. automodule:: networkx.readwrite.nx_yaml .. autosummary:: :toctree: generated/ read_yaml write_yaml networkx-1.8.1/doc/source/reference/readwrite.multiline_adjlist.rst0000664000175000017500000000040112177456333025526 0ustar aricaric00000000000000 Multiline Adjacency List ======================== .. automodule:: networkx.readwrite.multiline_adjlist .. autosummary:: :toctree: generated/ read_multiline_adjlist write_multiline_adjlist parse_multiline_adjlist generate_multiline_adjlist networkx-1.8.1/doc/source/reference/readwrite.gpickle.rst0000664000175000017500000000020412177456333023431 0ustar aricaric00000000000000Pickle ====== .. automodule:: networkx.readwrite.gpickle .. autosummary:: :toctree: generated/ read_gpickle write_gpickle networkx-1.8.1/doc/source/reference/algorithms.euler.rst0000664000175000017500000000022312177456333023313 0ustar aricaric00000000000000******** Eulerian ******** .. automodule:: networkx.algorithms.euler .. autosummary:: :toctree: generated/ is_eulerian eulerian_circuit networkx-1.8.1/doc/source/reference/api_1.0.rst0000664000175000017500000002025412177456333021164 0ustar aricaric00000000000000********************************* Version 1.0 notes and API changes ********************************* We have made some significant API changes, detailed below, to add functionality and clarity. This page reflects changes from networkx-0.99 to networkx-1.0. For changes from earlier versions to networkx-0.99 see :doc:`Version 0.99 API changes `. Version 1.0 requires Python 2.4 or greater. Please send comments and questions to the networkx-discuss mailing list: http://groups.google.com/group/networkx-discuss . Version numbering ================= In the future we will use a more standard release numbering system with major.minor[build] labels where major and minor are numbers and [build] is a label such as "dev1379" to indicate a development version or "rc1" to indicate a release candidate. We plan on sticking closer to a time-based release schedule with smaller incremental changes released on a roughly quarterly basis. The graph classes API will remain fixed, unless we determine there are serious bugs or other defects in the existing classes, until networkx-2.0 is released at some time in the future. Changes in base classes ======================= The most significant changes in are in the graph classes. All of the graph classes now allow optional graph, node, and edge attributes. Those attributes are stored internally in the graph classes as dictionaries and can be accessed simply like Python dictionaries in most cases. Graph attributes ---------------- Each graph keeps a dictionary of key=value attributes in the member G.graph. These attributes can be accessed directly using G.graph or added at instantiation using keyword arguments. >>> G=nx.Graph(region='Africa') >>> G.graph['color']='green' >>> G.graph {'color': 'green', 'region': 'Africa'} Node attributes --------------- Each node has a corresponding dictionary of attributes. Adding attributes to nodes is optional. 
Add node attributes using add_node(), add_nodes_from() or G.node >>> G.add_node(1, time='5pm') >>> G.add_nodes_from([3], time='2pm') >>> G.node[1] {'time': '5pm'} >>> G.node[1]['room'] = 714 >>> G.nodes(data=True) [(1, {'room': 714, 'time': '5pm'}), (3, {'time': '2pm'})] Edge attributes --------------- Each edge has a corresponding dictionary of attributes. The default edge data is now an empty dictionary of attributes and adding attributes to edges is optional. A common use case is to add a weight attribute to an edge: >>> G.add_edge(1,2,weight=3.14159) Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edge. >>> G.add_edge(1, 2, weight=4.7 ) >>> G.add_edges_from([(3,4),(4,5)], color='red') >>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) >>> G[1][2]['weight'] = 4.7 >>> G.edge[1][2]['weight'] = 4 Methods changed --------------- Graph(), DiGraph(), MultiGraph(), MultiDiGraph() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Now takes optional keyword=value attributes on initialization. >>> G=nx.Graph(year='2009',city='New York') add_node() ^^^^^^^^^^ Now takes optional keyword=value attributes or a dictionary of attributes. >>> G.add_node(1,room=714) add_nodes_from() ^^^^^^^^^^^^^^^^ Now takes optional keyword=value attributes or a dictionary of attributes applied to all affected nodes. >>> G.add_nodes_from([1,2],time='2pm') # all nodes have same attribute add_edge() ^^^^^^^^^^ Now takes optional keyword=value attributes or a dictionary of attributes. >>> G.add_edge(1, 2, weight=4.7 ) add_edges_from() ^^^^^^^^^^^^^^^^ Now takes optional keyword=value attributes or a dictionary of attributes applied to all affected edges. >>> G.add_edges_from([(3,4),(4,5)], color='red') >>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) nodes() and nodes_iter() ^^^^^^^^^^^^^^^^^^^^^^^^ New keyword data=True|False keyword determines whether to return two-tuples (n,dict) (True) with node attribution dictionary >>> G=nx.Graph([(1,2),(3,4)]) >>> G.nodes(data=True) [(1, {}), (2, {}), (3, {}), (4, {})] copy() ^^^^^^ Now returns a deep copy of the graph (copies all underlying data and attributes for nodes and edges). Use the class initializer to make a shallow copy: >>> G=nx.Graph() >>> G_shallow=nx.Graph(G) # shallow copy >>> G_deep=G.copy() # deep copy to_directed(), to_undirected() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Now returns a deep copy of the graph (copies all underlying data and attributes for nodes and edges). Use the class initializer to make a shallow copy: >>> G=nx.Graph() >>> D_shallow=nx.DiGraph(G) # shallow copy >>> D_deep=G.to_directed() # deep copy subgraph() ^^^^^^^^^^ With copy=True now returns a deep copy of the graph (copies all underlying data and attributes for nodes and edges). >>> G=nx.Graph() >>> # note: copy keyword deprecated in networkx>1.0 >>> # H=G.subgraph([],copy=True) # deep copy of all data add_cycle(), add_path(), add_star() ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Now take optional keyword=value attributes or a dictionary of attributes which are applied to all edges affected by the method. >>> G=nx.Graph() >>> G.add_path([0,1,2,3],width=3.2) Methods removed --------------- delete_node() ^^^^^^^^^^^^^ The preferred name is now remove_node(). delete_nodes_from() ^^^^^^^^^^^^^^^^^^^ No longer raises an exception on an attempt to delete a node not in the graph. The preferred name is now remove_nodes_from(). delete_edge() ^^^^^^^^^^^^^ Now raises an exception on an attempt to delete an edge not in the graph. 
The preferred name is now remove_edge(). delete_edges_from() ^^^^^^^^^^^^^^^^^^^ The preferred name is now remove_edges_from(). has_neighbor(): Use has_edge() get_edge() ^^^^^^^^^^ Renamed to get_edge_data(). Returns the edge attribute dictionary. The fastest way to get edge data for edge (u,v) is to use G[u][v] instead of G.get_edge_data(u,v) Members removed --------------- directed, multigraph, weighted ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use methods G.is_directed() and G.is_multigraph(). All graphs are weighted graphs now if they have numeric values in the 'weight' edge attribute. Methods added ------------- add_weighted edges_from() ^^^^^^^^^^^^^^^^^^^^^^^^^ Convenience method to add weighted edges to graph using a list of 3-tuples (u,v,weight). get_edge_data() ^^^^^^^^^^^^^^^ Renamed from get_edge(). The fastest way to get edge data for edge (u,v) is to use G[u][v] instead of G.get_edge_data(u,v) is_directed() ^^^^^^^^^^^^^ replaces member G.directed is_multigraph() ^^^^^^^^^^^^^^^ replaces member G.multigraph Classes Removed --------------- LabeledGraph, LabeledDiGraph ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ These classes have been folded into the regular classes. UbiGraph ^^^^^^^^ Removed as the ubigraph platform is no longer being supported. Additional functions/generators =============================== ego_graph, stochastic_graph, PageRank algorithm, HITS algorithm, GraphML writer, freeze, is_frozen, A* algorithm, directed scale-free generator, random clustered graph. Converting your existing code to networkx-1.0 ============================================= Weighted edges -------------- Edge information is now stored in an attribution dictionary so all edge data must be given a key to identify it. There is currently only one standard/reserved key, 'weight', which is used by algorithms and functions that use weighted edges. The associated value should be numeric. All other keys are available for users to assign as needed. >>> G=nx.Graph() >>> G.add_edge(1,2,weight=3.1415) # add the edge 1-2 with a weight >>> G[1][2]['weight']=2.3 # set the weight to 2.3 Similarly, for direct access the edge data, use the key of the edge data to retrieve it. >>> w = G[1][2]['weight'] All NetworkX algorithms that require/use weighted edges now use the 'weight' edge attribute. If you have existing algorithms that assumed the edge data was numeric, you should replace G[u][v] and G.get_edge(u,v) with G[u][v]['weight']. An idiom for getting a weight for graphs with or without an assigned weight key is >>> w= G[1][2].get('weight',1) # set w to 1 if there is no 'weight' key networkx-1.8.1/doc/source/reference/classes.graph.rst0000664000175000017500000000301312177456333022564 0ustar aricaric00000000000000.. _graph: ========================================== Graph -- Undirected graphs with self loops ========================================== Overview ======== .. currentmodule:: networkx .. autofunction:: Graph Adding and removing nodes and edges =================================== .. autosummary:: :toctree: generated/ Graph.__init__ Graph.add_node Graph.add_nodes_from Graph.remove_node Graph.remove_nodes_from Graph.add_edge Graph.add_edges_from Graph.add_weighted_edges_from Graph.remove_edge Graph.remove_edges_from Graph.add_star Graph.add_path Graph.add_cycle Graph.clear Iterating over nodes and edges ============================== .. 
autosummary:: :toctree: generated/ Graph.nodes Graph.nodes_iter Graph.__iter__ Graph.edges Graph.edges_iter Graph.get_edge_data Graph.neighbors Graph.neighbors_iter Graph.__getitem__ Graph.adjacency_list Graph.adjacency_iter Graph.nbunch_iter Information about graph structure ================================= .. autosummary:: :toctree: generated/ Graph.has_node Graph.__contains__ Graph.has_edge Graph.order Graph.number_of_nodes Graph.__len__ Graph.degree Graph.degree_iter Graph.size Graph.number_of_edges Graph.nodes_with_selfloops Graph.selfloop_edges Graph.number_of_selfloops Making copies and subgraphs =========================== .. autosummary:: :toctree: generated/ Graph.copy Graph.to_undirected Graph.to_directed Graph.subgraph networkx-1.8.1/doc/source/reference/algorithms.matching.rst0000664000175000017500000000023612177456333023775 0ustar aricaric00000000000000******** Matching ******** .. automodule:: networkx.algorithms.matching .. autosummary:: :toctree: generated/ maximal_matching max_weight_matching networkx-1.8.1/doc/source/reference/algorithms.connectivity.rst0000664000175000017500000000115012177456333024715 0ustar aricaric00000000000000************ Connectivity ************ .. automodule:: networkx.algorithms.connectivity Connectivity functions ---------------------- .. automodule:: networkx.algorithms.connectivity.connectivity .. autosummary:: :toctree: generated/ average_node_connectivity local_node_connectivity node_connectivity local_edge_connectivity edge_connectivity all_pairs_node_connectivity_matrix Cut functions ------------- .. automodule:: networkx.algorithms.connectivity.cuts .. autosummary:: :toctree: generated/ minimum_st_node_cut minimum_node_cut minimum_st_edge_cut minimum_edge_cut networkx-1.8.1/doc/source/reference/algorithms.isomorphism.vf2.rst0000664000175000017500000000262412177456333025253 0ustar aricaric00000000000000.. _vf2: ************* VF2 Algorithm ************* .. automodule:: networkx.algorithms.isomorphism.isomorphvf2 Graph Matcher ------------- .. currentmodule:: networkx.algorithms.isomorphism .. autosummary:: :toctree: generated/ GraphMatcher.__init__ GraphMatcher.initialize GraphMatcher.is_isomorphic GraphMatcher.subgraph_is_isomorphic GraphMatcher.isomorphisms_iter GraphMatcher.subgraph_isomorphisms_iter GraphMatcher.candidate_pairs_iter GraphMatcher.match GraphMatcher.semantic_feasibility GraphMatcher.syntactic_feasibility DiGraph Matcher --------------- .. currentmodule:: networkx.algorithms.isomorphism .. autosummary:: :toctree: generated/ DiGraphMatcher.__init__ DiGraphMatcher.initialize DiGraphMatcher.is_isomorphic DiGraphMatcher.subgraph_is_isomorphic DiGraphMatcher.isomorphisms_iter DiGraphMatcher.subgraph_isomorphisms_iter DiGraphMatcher.candidate_pairs_iter DiGraphMatcher.match DiGraphMatcher.semantic_feasibility DiGraphMatcher.syntactic_feasibility Match helpers ------------- .. currentmodule:: networkx.algorithms.isomorphism .. autosummary:: :toctree: generated/ categorical_node_match categorical_edge_match categorical_multiedge_match numerical_node_match numerical_edge_match numerical_multiedge_match generic_node_match generic_edge_match generic_multiedge_match networkx-1.8.1/doc/source/reference/citing.rst0000664000175000017500000000130312177456333021304 0ustar aricaric00000000000000.. -*- coding: utf-8 -*- Citing ====== To cite NetworkX please use the following publication: Aric A. Hagberg, Daniel A. Schult and Pieter J. 
Swart, `"Exploring network structure, dynamics, and function using NetworkX" `_, in `Proceedings of the 7th Python in Science Conference (SciPy2008) `_, Gäel Varoquaux, Travis Vaught, and Jarrod Millman (Eds), (Pasadena, CA USA), pp. 11--15, Aug 2008 .. only:: html `PDF `_ `BibTeX `_ networkx-1.8.1/doc/source/reference/drawing.rst0000664000175000017500000000175012177456333021470 0ustar aricaric00000000000000.. _drawing: ******* Drawing ******* Matplotlib ========== .. automodule:: networkx.drawing.nx_pylab .. autosummary:: :toctree: generated/ draw draw_networkx draw_networkx_nodes draw_networkx_edges draw_networkx_labels draw_networkx_edge_labels draw_circular draw_random draw_spectral draw_spring draw_shell draw_graphviz Graphviz AGraph (dot) ===================== .. automodule:: networkx.drawing.nx_agraph .. autosummary:: :toctree: generated/ from_agraph to_agraph write_dot read_dot graphviz_layout pygraphviz_layout Graphviz with pydot =================== .. automodule:: networkx.drawing.nx_pydot .. autosummary:: :toctree: generated/ from_pydot to_pydot write_dot read_dot graphviz_layout pydot_layout Graph Layout ============ .. automodule:: networkx.drawing.layout .. autosummary:: :toctree: generated/ circular_layout random_layout shell_layout spring_layout spectral_layout networkx-1.8.1/doc/source/reference/algorithms.block.rst0000664000175000017500000000021512177456333023272 0ustar aricaric00000000000000************* Blockmodeling ************* .. automodule:: networkx.algorithms.block .. autosummary:: :toctree: generated/ blockmodel networkx-1.8.1/doc/source/reference/exceptions.rst0000664000175000017500000000062612177456333022217 0ustar aricaric00000000000000********** Exceptions ********** .. automodule:: networkx.exception .. currentmodule:: networkx .. autoclass:: networkx.NetworkXException .. autoclass:: networkx.NetworkXError .. autoclass:: networkx.NetworkXPointlessConcept .. autoclass:: networkx.NetworkXAlgorithmError .. autoclass:: networkx.NetworkXUnfeasible .. autoclass:: networkx.NetworkXNoPath .. autoclass:: networkx.NetworkXUnbounded networkx-1.8.1/doc/source/reference/algorithms.vitality.rst0000664000175000017500000000021112177456333024041 0ustar aricaric00000000000000******** Vitality ******** .. automodule:: networkx.algorithms.vitality .. autosummary:: :toctree: generated/ closeness_vitality networkx-1.8.1/doc/source/reference/utils.rst0000664000175000017500000000174312177456333021177 0ustar aricaric00000000000000********* Utilities ********* .. automodule:: networkx.utils .. currentmodule:: networkx.utils Helper functions ---------------- .. automodule:: networkx.utils.misc .. autosummary:: :toctree: generated/ is_string_like flatten iterable is_list_of_ints make_str cumulative_sum generate_unique_node default_opener Data structures and Algorithms ------------------------------ .. automodule:: networkx.utils.union_find .. autosummary:: :toctree: generated/ UnionFind.union Random sequence generators -------------------------- .. automodule:: networkx.utils.random_sequence .. autosummary:: :toctree: generated/ create_degree_sequence pareto_sequence powerlaw_sequence uniform_sequence cumulative_distribution discrete_sequence zipf_sequence zipf_rv random_weighted_sample weighted_choice Decorators ---------- .. automodule:: networkx.utils.decorators .. autosummary:: :toctree: generated/ open_file require networkx-1.8.1/doc/source/reference/readwrite.graphml.rst0000664000175000017500000000021012177456333023442 0ustar aricaric00000000000000GraphML ======= .. 
automodule:: networkx.readwrite.graphml .. autosummary:: :toctree: generated/ read_graphml write_graphml networkx-1.8.1/doc/source/reference/algorithms.chordal.rst0000664000175000017500000000037012177456333023616 0ustar aricaric00000000000000.. _chordal: Chordal ======= .. toctree:: :maxdepth: 2 .. automodule:: networkx.algorithms.chordal.chordal_alg .. autosummary:: :toctree: generated/ is_chordal chordal_graph_cliques chordal_graph_treewidth find_induced_nodes networkx-1.8.1/doc/source/reference/algorithms.traversal.rst0000664000175000017500000000105712177456333024210 0ustar aricaric00000000000000.. _traversal: Traversal ========= .. toctree:: :maxdepth: 2 Depth First Search ------------------ .. automodule:: networkx.algorithms.traversal.depth_first_search .. autosummary:: :toctree: generated/ dfs_edges dfs_tree dfs_predecessors dfs_successors dfs_preorder_nodes dfs_postorder_nodes dfs_labeled_edges Breadth First Search -------------------- .. automodule:: networkx.algorithms.traversal.breadth_first_search .. autosummary:: :toctree: generated/ bfs_edges bfs_tree bfs_predecessors bfs_successors networkx-1.8.1/doc/source/reference/algorithms.clustering.rst0000664000175000017500000000032112177456333024355 0ustar aricaric00000000000000********** Clustering ********** .. automodule:: networkx.algorithms.cluster .. autosummary:: :toctree: generated/ triangles transitivity clustering average_clustering square_clustering networkx-1.8.1/doc/source/reference/readwrite.rst0000664000175000017500000000062412177456333022022 0ustar aricaric00000000000000.. _readwrite: ************************** Reading and writing graphs ************************** .. toctree:: :maxdepth: 2 readwrite.adjlist readwrite.multiline_adjlist readwrite.edgelist readwrite.gexf readwrite.gml readwrite.gpickle readwrite.graphml readwrite.json_graph readwrite.leda readwrite.yaml readwrite.sparsegraph6 readwrite.pajek readwrite.nx_shp networkx-1.8.1/doc/source/reference/algorithms.mst.rst0000664000175000017500000000031012177456333022777 0ustar aricaric00000000000000********************* Minimum Spanning Tree ********************* .. automodule:: networkx.algorithms.mst .. autosummary:: :toctree: generated/ minimum_spanning_tree minimum_spanning_edges networkx-1.8.1/doc/source/reference/algorithms.community.rst0000664000175000017500000000037212177456333024230 0ustar aricaric00000000000000*********** Communities *********** .. automodule:: networkx.algorithms.community .. currentmodule:: networkx K-Clique ^^^^^^^^ .. automodule:: networkx.algorithms.community.kclique .. autosummary:: :toctree: generated/ k_clique_communities networkx-1.8.1/doc/source/reference/convert.rst0000664000175000017500000000136212177456333021514 0ustar aricaric00000000000000***************************************** Converting to and from other data formats ***************************************** .. currentmodule:: networkx To NetworkX Graph ----------------- .. automodule:: networkx.convert .. autosummary:: :toctree: generated/ to_networkx_graph Dictionaries ------------ .. autosummary:: :toctree: generated/ to_dict_of_dicts from_dict_of_dicts Lists ----- .. autosummary:: :toctree: generated/ to_dict_of_lists from_dict_of_lists to_edgelist from_edgelist Numpy ----- .. autosummary:: :toctree: generated/ to_numpy_matrix to_numpy_recarray from_numpy_matrix Scipy ----- .. 
autosummary:: :toctree: generated/ to_scipy_sparse_matrix from_scipy_sparse_matrix networkx-1.8.1/doc/source/reference/legal.rst0000664000175000017500000000335412177456333021123 0ustar aricaric00000000000000License ======= NetworkX is distributed with the BSD license. :: Copyright (C) 2004-2012, NetworkX Developers Aric Hagberg Dan Schult Pieter Swart All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the NetworkX Developers nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. networkx-1.8.1/doc/source/reference/introduction.rst0000664000175000017500000003061512177456333022560 0ustar aricaric00000000000000Introduction ~~~~~~~~~~~~ .. currentmodule:: networkx .. only:: html NetworkX provides data structures for graphs (or networks) along with graph algorithms, generators, and drawing tools. The structure of NetworkX can be seen by the organization of its source code. The package provides classes for graph objects, generators to create standard graphs, IO routines for reading in existing datasets, algorithms to analyse the resulting networks and some basic drawing tools. Most of the NetworkX API is provided by functions which take a graph object as an argument. Methods of the graph object are limited to basic manipulation and reporting. This provides modularity of code and documentation. It also makes it easier for newcomers to learn about the package in stages. The source code for each module is meant to be easy to read and reading this Python code is actually a good way to learn more about network algorithms, but we have put a lot of effort into making the documentation sufficient and friendly. If you have suggestions or questions please contact us by joining the `NetworkX Google group `_. Classes are named using CamelCase (capital letters at the start of each word). functions, methods and variable names are lower_case_underscore (lowercase with an underscore representing a space between words). NetworkX Basics --------------- After starting Python, import the networkx module with (the recommended way) >>> import networkx as nx To save repetition, in the documentation we assume that NetworkX has been imported this way. 
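A quick way to confirm that the import worked, and which version you are using, is the following (a minimal sanity check; the version string shown here assumes networkx-1.8.1 and will differ on other installations):

>>> import networkx as nx
>>> nx.__version__   # doctest: +SKIP
'1.8.1'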
If importing networkx fails, it means that Python cannot find the installed module. Check your installation and your PYTHONPATH. The following basic graph types are provided as Python classes: :class:`Graph` This class implements an undirected graph. It ignores multiple edges between two nodes. It does allow self-loop edges between a node and itself. :class:`DiGraph` Directed graphs, that is, graphs with directed edges. Operations common to directed graphs, (a subclass of Graph). :class:`MultiGraph` A flexible graph class that allows multiple undirected edges between pairs of nodes. The additional flexibility leads to some degradation in performance, though usually not significant. :class:`MultiDiGraph` A directed version of a MultiGraph. Empty graph-like objects are created with >>> G=nx.Graph() >>> G=nx.DiGraph() >>> G=nx.MultiGraph() >>> G=nx.MultiDiGraph() All graph classes allow any :term:`hashable` object as a node. Hashable objects include strings, tuples, integers, and more. Arbitrary edge attributes such as weights and labels can be associated with an edge. The graph internal data structures are based on an adjacency list representation and implemented using Python :term:`dictionary` datastructures. The graph adjaceny structure is implemented as a Python dictionary of dictionaries; the outer dictionary is keyed by nodes to values that are themselves dictionaries keyed by neighboring node to the edge attributes associated with that edge. This "dict-of-dicts" structure allows fast addition, deletion, and lookup of nodes and neighbors in large graphs. The underlying datastructure is accessed directly by methods (the programming interface "API") in the class definitions. All functions, on the other hand, manipulate graph-like objects solely via those API methods and not by acting directly on the datastructure. This design allows for possible replacement of the 'dicts-of-dicts'-based datastructure with an alternative datastructure that implements the same methods. Graphs ======= The first choice to be made when using NetworkX is what type of graph object to use. A graph (network) is a collection of nodes together with a collection of edges that are pairs of nodes. Attributes are often associated with nodes and/or edges. NetworkX graph objects come in different flavors depending on two main properties of the network: - Directed: Are the edges **directed**? Does the order of the edge pairs (u,v) matter? A directed graph is specified by the "Di" prefix in the class name, e.g. DiGraph(). We make this distinction because many classical graph properties are defined differently for directed graphs. - Multi-edges: Are multiple edges allowed between each pair of nodes? As you might imagine, multiple edges requires a different data structure, though tricky users could design edge data objects to support this functionality. We provide a standard data structure and interface for this type of graph using the prefix "Multi", e.g. MultiGraph(). The basic graph classes are named: :doc:`Graph `, :doc:`DiGraph`, :doc:`MultiGraph `, and :doc:`MultiDiGraph ` Nodes and Edges --------------- The next choice you have to make when specifying a graph is what kinds of nodes and edges to use. If the topology of the network is all you care about then using integers or strings as the nodes makes sense and you need not worry about edge data. If you have a data structure already in place to describe nodes you can simply use that structure as your nodes provided it is :term:`hashable`. 
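For example (a small sketch; the node objects below are arbitrary and chosen only for illustration), strings, numbers, and tuples are all hashable and can be used as nodes directly:

>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_node('gene-A')                    # a string node
>>> G.add_node(('chr1', 1523))              # a tuple node; tuples are hashable
>>> G.add_edge('gene-A', ('chr1', 1523))
>>> G.number_of_nodes()
2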
If it is not hashable you can use a unique identifier to represent the node and assign the data as a :term:`node attribute`. Edges often have data associated with them. Arbitrary data can associated with edges as an :term:`edge attribute`. If the data is numeric and the intent is to represent a *weighted* graph then use the 'weight' keyword for the attribute. Some of the graph algorithms, such as Dijkstra's shortest path algorithm, use this attribute name to get the weight for each edge. Other attributes can be assigned to an edge by using keyword/value pairs when adding edges. You can use any keyword except 'weight' to name your attribute and can then easily query the edge data by that attribute keyword. Once you've decided how to encode the nodes and edges, and whether you have an undirected/directed graph with or without multiedges you are ready to build your network. Graph Creation ============== NetworkX graph objects can be created in one of three ways: - Graph generators -- standard algorithms to create network topologies. - Importing data from pre-existing (usually file) sources. - Adding edges and nodes explicitly. Explicit addition and removal of nodes/edges is the easiest to describe. Each graph object supplies methods to manipulate the graph. For example, >>> import networkx as nx >>> G=nx.Graph() >>> G.add_edge(1,2) # default edge data=1 >>> G.add_edge(2,3,weight=0.9) # specify edge data Edge attributes can be anything: >>> import math >>> G.add_edge('y','x',function=math.cos) >>> G.add_node(math.cos) # any hashable can be a node You can add many edges at one time: >>> elist=[('a','b',5.0),('b','c',3.0),('a','c',1.0),('c','d',7.3)] >>> G.add_weighted_edges_from(elist) See the :doc:`/tutorial/index` for more examples. Some basic graph operations such as union and intersection are described in the :ref:`Operators module` documentation. Graph generators such as binomial_graph and powerlaw_graph are provided in the :doc:`generators` subpackage. For importing network data from formats such as GML, GraphML, edge list text files see the :doc:`readwrite` subpackage. Graph Reporting =============== Class methods are used for the basic reporting functions neighbors, edges and degree. Reporting of lists is often needed only to iterate through that list so we supply iterator versions of many property reporting methods. For example edges() and nodes() have corresponding methods edges_iter() and nodes_iter(). Using these methods when you can will save memory and often time as well. The basic graph relationship of an edge can be obtained in two basic ways. One can look for neighbors of a node or one can look for edges incident to a node. We jokingly refer to people who focus on nodes/neighbors as node-centric and people who focus on edges as edge-centric. The designers of NetworkX tend to be node-centric and view edges as a relationship between nodes. You can see this by our avoidance of notation like G[u,v] in favor of G[u][v]. Most data structures for sparse graphs are essentially adjacency lists and so fit this perspective. In the end, of course, it doesn't really matter which way you examine the graph. G.edges() removes duplicate representations of each edge while G.neighbors(n) or G[n] is slightly faster but doesn't remove duplicates. Any properties that are more complicated than edges, neighbors and degree are provided by functions. For example nx.triangles(G,n) gives the number of triangles which include node n as a vertex. 
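For instance (a short doctest added for illustration; the graph is simply a triangle with one pendant edge):

>>> import networkx as nx
>>> G = nx.Graph()
>>> G.add_edges_from([(1, 2), (2, 3), (1, 3), (3, 4)])
>>> nx.triangles(G, 3)   # node 3 belongs to exactly one triangle
1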
These functions are grouped in the code and documentation under the term :ref:`algorithms`. Algorithms ========== A number of graph algorithms are provided with NetworkX. These include shortest path, and breadth first search (see :ref:`traversal`), clustering and isomorphism algorithms and others. There are many that we have not developed yet too. If you implement a graph algorithm that might be useful for others please let us know through the `NetworkX Google group `_ or the Github `Developer Zone `_. As an example here is code to use Dijkstra's algorithm to find the shortest weighted path: >>> G=nx.Graph() >>> e=[('a','b',0.3),('b','c',0.9),('a','c',0.5),('c','d',1.2)] >>> G.add_weighted_edges_from(e) >>> print(nx.dijkstra_path(G,'a','d')) ['a', 'c', 'd'] Drawing ======= While NetworkX is not designed as a network layout tool, we provide a simple interface to drawing packages and some simple layout algorithms. We interface to the excellent Graphviz layout tools like dot and neato with the (suggested) pygraphviz package or the pydot interface. Drawing can be done using external programs or the Matplotlib Python package. Interactive GUI interfaces are possible though not provided. The drawing tools are provided in the module :ref:`drawing`. The basic drawing functions essentially place the nodes on a scatterplot using the positions in a dictionary or computed with a layout function. The edges are then lines between those dots. >>> G=nx.cubical_graph() >>> nx.draw(G) # default spring_layout >>> nx.draw(G,pos=nx.spectral_layout(G), nodecolor='r',edge_color='b') See the :doc:`examples` for more ideas. Data Structure ============== NetworkX uses a "dictionary of dictionaries of dictionaries" as the basic network data structure. This allows fast lookup with reasonable storage for large sparse networks. The keys are nodes so G[u] returns an adjacency dictionary keyed by neighbor to the edge attribute dictionary. The expression G[u][v] returns the edge attribute dictionary itself. A dictionary of lists would have also been possible, but not allowed fast edge detection nor convenient storage of edge data. Advantages of dict-of-dicts-of-dicts data structure: - Find edges and remove edges with two dictionary look-ups. - Prefer to "lists" because of fast lookup with sparse storage. - Prefer to "sets" since data can be attached to edge. - G[u][v] returns the edge attribute dictionary. - ``n in G`` tests if node ``n`` is in graph G. - ``for n in G:`` iterates through the graph. - ``for nbr in G[n]:`` iterates through neighbors. As an example, here is a representation of an undirected graph with the edges ('A','B'), ('B','C') >>> G=nx.Graph() >>> G.add_edge('A','B') >>> G.add_edge('B','C') >>> print(G.adj) {'A': {'B': {}}, 'C': {'B': {}}, 'B': {'A': {}, 'C': {}}} The data structure gets morphed slightly for each base graph class. For DiGraph two dict-of-dicts-of-dicts structures are provided, one for successors and one for predecessors. For MultiGraph/MultiDiGraph we use a dict-of-dicts-of-dicts-of-dicts [#turtles]_ where the third dictionary is keyed by an edge key identifier to the fourth dictionary which contains the edge attributes for that edge between the two nodes. Graphs use a dictionary of attributes for each edge. We use a dict-of-dicts-of-dicts data structure with the inner dictionary storing "name-value" relationships for that edge. >>> G=nx.Graph() >>> G.add_edge(1,2,color='red',weight=0.84,size=300) >>> print(G[1][2]['size']) 300 .. rubric:: Footnotes .. 
[#turtles] "It's dictionaries all the way down." networkx-1.8.1/doc/source/reference/algorithms.core.rst0000664000175000017500000000024112177456333023127 0ustar aricaric00000000000000***** Cores ***** .. automodule:: networkx.algorithms.core .. autosummary:: :toctree: generated/ core_number k_core k_shell k_crust k_corona networkx-1.8.1/doc/source/reference/algorithms.simple_paths.rst0000664000175000017500000000022712177456333024673 0ustar aricaric00000000000000************ Simple Paths ************ .. automodule:: networkx.algorithms.simple_paths .. autosummary:: :toctree: generated/ all_simple_paths networkx-1.8.1/doc/source/reference/classes.multidigraph.rst0000664000175000017500000000440112177456333024156 0ustar aricaric00000000000000.. _multidigraph: ================================================================= MultiDiGraph - Directed graphs with self loops and parallel edges ================================================================= Overview ======== .. currentmodule:: networkx .. autofunction:: MultiDiGraph Adding and Removing Nodes and Edges =================================== .. autosummary:: :toctree: generated/ MultiDiGraph.__init__ MultiDiGraph.add_node MultiDiGraph.add_nodes_from MultiDiGraph.remove_node MultiDiGraph.remove_nodes_from MultiDiGraph.add_edge MultiDiGraph.add_edges_from MultiDiGraph.add_weighted_edges_from MultiDiGraph.remove_edge MultiDiGraph.remove_edges_from MultiDiGraph.add_star MultiDiGraph.add_path MultiDiGraph.add_cycle MultiDiGraph.clear Iterating over nodes and edges ============================== .. autosummary:: :toctree: generated/ MultiDiGraph.nodes MultiDiGraph.nodes_iter MultiDiGraph.__iter__ MultiDiGraph.edges MultiDiGraph.edges_iter MultiDiGraph.out_edges MultiDiGraph.out_edges_iter MultiDiGraph.in_edges MultiDiGraph.in_edges_iter MultiDiGraph.get_edge_data MultiDiGraph.neighbors MultiDiGraph.neighbors_iter MultiDiGraph.__getitem__ MultiDiGraph.successors MultiDiGraph.successors_iter MultiDiGraph.predecessors MultiDiGraph.predecessors_iter MultiDiGraph.adjacency_list MultiDiGraph.adjacency_iter MultiDiGraph.nbunch_iter Information about graph structure ================================= .. autosummary:: :toctree: generated/ MultiDiGraph.has_node MultiDiGraph.__contains__ MultiDiGraph.has_edge MultiDiGraph.order MultiDiGraph.number_of_nodes MultiDiGraph.__len__ MultiDiGraph.degree MultiDiGraph.degree_iter MultiDiGraph.in_degree MultiDiGraph.in_degree_iter MultiDiGraph.out_degree MultiDiGraph.out_degree_iter MultiDiGraph.size MultiDiGraph.number_of_edges MultiDiGraph.nodes_with_selfloops MultiDiGraph.selfloop_edges MultiDiGraph.number_of_selfloops Making copies and subgraphs =========================== .. autosummary:: :toctree: generated/ MultiDiGraph.copy MultiDiGraph.to_undirected MultiDiGraph.to_directed MultiDiGraph.subgraph MultiDiGraph.reverse networkx-1.8.1/doc/source/reference/algorithms.assortativity.rst0000664000175000017500000000143112177456333025126 0ustar aricaric00000000000000************* Assortativity ************* .. automodule:: networkx.algorithms.assortativity .. autosummary:: :toctree: generated/ Assortativity ------------- .. autosummary:: :toctree: generated/ degree_assortativity_coefficient attribute_assortativity_coefficient numeric_assortativity_coefficient degree_pearson_correlation_coefficient Average neighbor degree ----------------------- .. autosummary:: :toctree: generated/ average_neighbor_degree Average degree connectivity --------------------------- .. 
autosummary:: :toctree: generated/ average_degree_connectivity k_nearest_neighbors Mixing ------ .. autosummary:: :toctree: generated/ attribute_mixing_matrix degree_mixing_matrix degree_mixing_dict attribute_mixing_dict networkx-1.8.1/doc/source/reference/algorithms.cycles.rst0000664000175000017500000000021312177456333023460 0ustar aricaric00000000000000****** Cycles ****** .. automodule:: networkx.algorithms.cycles .. autosummary:: :toctree: generated/ cycle_basis simple_cycles networkx-1.8.1/doc/source/reference/readwrite.leda.rst0000664000175000017500000000017212177456333022724 0ustar aricaric00000000000000LEDA ==== .. automodule:: networkx.readwrite.leda .. autosummary:: :toctree: generated/ read_leda parse_leda networkx-1.8.1/doc/source/reference/generators.rst0000664000175000017500000000736512177456333022216 0ustar aricaric00000000000000.. _generators: Graph generators **************** .. currentmodule:: networkx Atlas ----- .. automodule:: networkx.generators.atlas .. autosummary:: :toctree: generated/ graph_atlas_g Classic ------- .. automodule:: networkx.generators.classic .. autosummary:: :toctree: generated/ balanced_tree barbell_graph complete_graph complete_bipartite_graph circular_ladder_graph cycle_graph dorogovtsev_goltsev_mendes_graph empty_graph grid_2d_graph grid_graph hypercube_graph ladder_graph lollipop_graph null_graph path_graph star_graph trivial_graph wheel_graph Small ----- .. automodule:: networkx.generators.small .. autosummary:: :toctree: generated/ make_small_graph LCF_graph bull_graph chvatal_graph cubical_graph desargues_graph diamond_graph dodecahedral_graph frucht_graph heawood_graph house_graph house_x_graph icosahedral_graph krackhardt_kite_graph moebius_kantor_graph octahedral_graph pappus_graph petersen_graph sedgewick_maze_graph tetrahedral_graph truncated_cube_graph truncated_tetrahedron_graph tutte_graph Random Graphs ------------- .. automodule:: networkx.generators.random_graphs .. autosummary:: :toctree: generated/ fast_gnp_random_graph gnp_random_graph dense_gnm_random_graph gnm_random_graph erdos_renyi_graph binomial_graph newman_watts_strogatz_graph watts_strogatz_graph connected_watts_strogatz_graph random_regular_graph barabasi_albert_graph powerlaw_cluster_graph random_lobster random_shell_graph random_powerlaw_tree random_powerlaw_tree_sequence Degree Sequence --------------- .. automodule:: networkx.generators.degree_seq .. autosummary:: :toctree: generated/ configuration_model directed_configuration_model expected_degree_graph havel_hakimi_graph directed_havel_hakimi_graph degree_sequence_tree random_degree_sequence_graph Random Clustered ---------------- .. automodule:: networkx.generators.random_clustered .. autosummary:: :toctree: generated/ random_clustered_graph Directed -------- .. automodule:: networkx.generators.directed .. autosummary:: :toctree: generated/ gn_graph gnr_graph gnc_graph scale_free_graph Geometric --------- .. automodule:: networkx.generators.geometric .. autosummary:: :toctree: generated/ random_geometric_graph geographical_threshold_graph waxman_graph navigable_small_world_graph Hybrid ------ .. automodule:: networkx.generators.hybrid .. autosummary:: :toctree: generated/ kl_connected_subgraph is_kl_connected Bipartite --------- .. automodule:: networkx.generators.bipartite .. 
autosummary:: :toctree: generated/ bipartite_configuration_model bipartite_havel_hakimi_graph bipartite_reverse_havel_hakimi_graph bipartite_alternating_havel_hakimi_graph bipartite_preferential_attachment_graph bipartite_random_graph bipartite_gnmk_random_graph Line Graph ---------- .. automodule:: networkx.generators.line .. autosummary:: :toctree: generated/ line_graph Ego Graph --------- .. automodule:: networkx.generators.ego .. autosummary:: :toctree: generated/ ego_graph Stochastic ---------- .. automodule:: networkx.generators.stochastic .. autosummary:: :toctree: generated/ stochastic_graph Intersection ------------ .. automodule:: networkx.generators.intersection .. autosummary:: :toctree: generated/ uniform_random_intersection_graph k_random_intersection_graph general_random_intersection_graph Social Networks --------------- .. automodule:: networkx.generators.social .. autosummary:: :toctree: generated/ karate_club_graph davis_southern_women_graph florentine_families_graph networkx-1.8.1/doc/source/reference/algorithms.operators.rst0000664000175000017500000000126412177456333024223 0ustar aricaric00000000000000.. _operators: Operators ********* .. automodule:: networkx.algorithms.operators.unary .. autosummary:: :toctree: generated/ complement reverse .. automodule:: networkx.algorithms.operators.binary .. autosummary:: :toctree: generated/ compose union disjoint_union intersection difference symmetric_difference .. automodule:: networkx.algorithms.operators.all .. autosummary:: :toctree: generated/ compose_all union_all disjoint_union_all intersection_all .. automodule:: networkx.algorithms.operators.product .. autosummary:: :toctree: generated/ cartesian_product lexicographic_product strong_product tensor_product networkx-1.8.1/doc/source/reference/index.rst0000664000175000017500000000053012177456333021137 0ustar aricaric00000000000000.. _reference: Reference ********* :Release: |release| :Date: |today| .. toctree:: :maxdepth: 2 introduction classes algorithms functions generators linalg convert relabel readwrite drawing exceptions utils legal citing credits glossary .. toctree:: :hidden: pdf_reference networkx-1.8.1/doc/source/reference/algorithms.centrality.rst0000664000175000017500000000237512177456333024367 0ustar aricaric00000000000000********** Centrality ********** .. automodule:: networkx.algorithms.centrality Degree ------ .. autosummary:: :toctree: generated/ degree_centrality in_degree_centrality out_degree_centrality Closeness --------- .. autosummary:: :toctree: generated/ closeness_centrality Betweenness ----------- .. autosummary:: :toctree: generated/ betweenness_centrality edge_betweenness_centrality Current Flow Closeness ---------------------- .. autosummary:: :toctree: generated/ current_flow_closeness_centrality Current-Flow Betweenness ------------------------ .. autosummary:: :toctree: generated/ current_flow_betweenness_centrality edge_current_flow_betweenness_centrality approximate_current_flow_betweenness_centrality Eigenvector ----------- .. autosummary:: :toctree: generated/ eigenvector_centrality eigenvector_centrality_numpy katz_centrality katz_centrality_numpy Communicability --------------- .. autosummary:: :toctree: generated/ communicability communicability_exp communicability_centrality communicability_centrality_exp communicability_betweenness_centrality estrada_index Load ---- .. 
autosummary:: :toctree: generated/ load_centrality edge_load networkx-1.8.1/doc/source/reference/classes.multigraph.rst0000664000175000017500000000346112177456333023646 0ustar aricaric00000000000000.. _multigraph: ================================================================= MultiGraph - Undirected graphs with self loops and parallel edges ================================================================= Overview ======== .. currentmodule:: networkx .. autofunction:: MultiGraph Adding and removing nodes and edges =================================== .. autosummary:: :toctree: generated/ MultiGraph.__init__ MultiGraph.add_node MultiGraph.add_nodes_from MultiGraph.remove_node MultiGraph.remove_nodes_from MultiGraph.add_edge MultiGraph.add_edges_from MultiGraph.add_weighted_edges_from MultiGraph.remove_edge MultiGraph.remove_edges_from MultiGraph.add_star MultiGraph.add_path MultiGraph.add_cycle MultiGraph.clear Iterating over nodes and edges ============================== .. autosummary:: :toctree: generated/ MultiGraph.nodes MultiGraph.nodes_iter MultiGraph.__iter__ MultiGraph.edges MultiGraph.edges_iter MultiGraph.get_edge_data MultiGraph.neighbors MultiGraph.neighbors_iter MultiGraph.__getitem__ MultiGraph.adjacency_list MultiGraph.adjacency_iter MultiGraph.nbunch_iter Information about graph structure ================================= .. autosummary:: :toctree: generated/ MultiGraph.has_node MultiGraph.__contains__ MultiGraph.has_edge MultiGraph.order MultiGraph.number_of_nodes MultiGraph.__len__ MultiGraph.degree MultiGraph.degree_iter MultiGraph.size MultiGraph.number_of_edges MultiGraph.nodes_with_selfloops MultiGraph.selfloop_edges MultiGraph.number_of_selfloops Making copies and subgraphs =========================== .. autosummary:: :toctree: generated/ MultiGraph.copy MultiGraph.to_undirected MultiGraph.to_directed MultiGraph.subgraph networkx-1.8.1/doc/source/reference/readwrite.gml.rst0000664000175000017500000000022012177456333022570 0ustar aricaric00000000000000GML === .. automodule:: networkx.readwrite.gml .. autosummary:: :toctree: generated/ read_gml write_gml parse_gml generate_gml networkx-1.8.1/doc/source/reference/credits.rst0000664000175000017500000001127012177456333021470 0ustar aricaric00000000000000Credits ------- NetworkX was originally written by Aric Hagberg, Dan Schult, and Pieter Swart, and has been developed with the help of many others. Thanks to Guido van Rossum for the idea of using Python for implementing a graph data structure http://www.python.org/doc/essays/graphs.html Thanks to David Eppstein for the idea of representing a graph G so that "for n in G" loops over the nodes in G and G[n] are node n's neighbors. Thanks to everyone who has improved NetworkX by contributing code, bug reports (and fixes), documentation, and input on design, featues, and the future of NetworkX. Thanks especially to the following contributors: - Katy Bold contributed the Karate Club graph. - Hernan Rozenfeld added dorogovtsev_goltsev_mendes_graph and did stress testing. - Brendt Wohlberg added examples from the Stanford GraphBase. - Jim Bagrow reported bugs in the search methods. - Holly Johnsen helped fix the path based centrality measures. - Arnar Flatberg fixed the graph laplacian routines. - Chris Myers suggested using None as a default datatype, suggested improvements for the IO routines, added grid generator index tuple labeling and associated routines, and reported bugs. 
- Joel Miller tested and improved the connected components methods fixed bugs and typos in the graph generators, and contributed the random clustered graph generator. - Keith Briggs sorted out naming issues for random graphs and wrote dense_gnm_random_graph. - Ignacio Rozada provided the Krapivsky-Redner graph generator. - Phillipp Pagel helped fix eccentricity etc. for disconnected graphs. - Sverre Sundsdal contributed bidirectional shortest path and Dijkstra routines, s-metric computation and graph generation - Ross M. Richardson contributed the expected degree graph generator and helped test the pygraphviz interface. - Christopher Ellison implemented the VF2 isomorphism algorithm and is a core developer. - Eben Kenah contributed the strongly connected components and DFS functions. - Sasha Gutfriend contributed edge betweenness algorithms. - Udi Weinsberg helped develop intersection and difference operators. - Matteo Dell'Amico wrote the random regular graph generator. - Andrew Conway contributed ego_graph, eigenvector centrality, line graph and much more. - Raf Guns wrote the GraphML writer. - Salim Fadhley and Matteo Dell'Amico contributed the A* algorithm. - Fabrice Desclaux contributed the Matplotlib edge labeling code. - Arpad Horvath fixed the barabasi_albert_graph() generator. - Minh Van Nguyen contributed the connected_watts_strogatz_graph() and documentation for the Graph and MultiGraph classes. - Willem Ligtenberg contributed the directed scale free graph generator. - Loïc Séguin-C. contributed the Ford-Fulkerson max flow and min cut algorithms, and ported all of NetworkX to Python3. He is a NetworkX core developer. - Paul McGuire improved the performance of the GML data parser. - Jesus Cerquides contributed the chordal graph algorithms. - Ben Edwards contributed tree generating functions, the rich club coefficient algorithm, the graph product functions, and a whole lot of other useful nuts and bolts. - Jon Olav Vik contributed cycle finding algorithms. - Hugh Brown improved the words.py example from the n^2 algorithm. - Ben Reilly contributed the shapefile reader and writer. - Leo Lopes contributed the maximal independent set algorithm. - Jordi Torrents contributed the bipartite clustering, bipartite node redundancy, square clustering, bipartite projection articulation point, and flow-based connectivity algorithms. - Dheeraj M R contributed the distance-regular testing algorithm - Franck Kalala contributed the subgraph_centrality and communicability algorithms - Simon Knight improved the GraphML functions to handle yEd/yfiles data, and to handle types correctly. - Conrad Lee contributed the k-clique community finding algorithm. - Sérgio Nery Simões wrote the function for finding all simple paths, and all shortest paths. - Robert King contributed union, disjoint union, compose, and intersection operators that work on lists of graphs. - Nick Mancuso wrote the approximation algorithms for dominating set, edge dominating set, independent set, max clique, and min-weighted vertex cover. 
- Brian Cloteaux contributed the linear-time graphicality tests and Havel-Hakimi graph generators - Alejandro Weinstein contributed the directed Laplacian code - Dustin Smith wrote the dictionary to numpy array function - Mathieu Larose sped up the topological sort code - Vincent Gauthier contributed the Katz centrality algorithm - Sérgio Nery Simões developed the code for finding all simple paths networkx-1.8.1/doc/source/reference/algorithms.isomorphism.rst0000664000175000017500000000063012177456333024552 0ustar aricaric00000000000000.. _isomorphism: *********** Isomorphism *********** .. toctree:: :maxdepth: 2 .. automodule:: networkx.algorithms.isomorphism .. autosummary:: :toctree: generated/ is_isomorphic could_be_isomorphic fast_could_be_isomorphic faster_could_be_isomorphic Advanced Interface to VF2 Algorithm ----------------------------------- .. toctree:: :maxdepth: 2 algorithms.isomorphism.vf2 networkx-1.8.1/doc/source/reference/algorithms.isolates.rst0000664000175000017500000000021412177456333024022 0ustar aricaric00000000000000******** Isolates ******** .. automodule:: networkx.algorithms.isolate .. autosummary:: :toctree: generated/ is_isolate isolates networkx-1.8.1/doc/source/reference/readwrite.json_graph.rst0000664000175000017500000000035712177456333024156 0ustar aricaric00000000000000JSON ==== .. automodule:: networkx.readwrite.json_graph .. autosummary:: :toctree: generated/ node_link_data node_link_graph adjacency_data adjacency_graph tree_data tree_graph dumps loads dump load networkx-1.8.1/doc/source/reference/classes.digraph.rst0000664000175000017500000000362512177456333023112 0ustar aricaric00000000000000.. _digraph: ========================================= DiGraph - Directed graphs with self loops ========================================= Overview ======== .. currentmodule:: networkx .. autofunction:: DiGraph Adding and removing nodes and edges =================================== .. autosummary:: :toctree: generated/ DiGraph.__init__ DiGraph.add_node DiGraph.add_nodes_from DiGraph.remove_node DiGraph.remove_nodes_from DiGraph.add_edge DiGraph.add_edges_from DiGraph.add_weighted_edges_from DiGraph.remove_edge DiGraph.remove_edges_from DiGraph.add_star DiGraph.add_path DiGraph.add_cycle DiGraph.clear Iterating over nodes and edges ============================== .. autosummary:: :toctree: generated/ DiGraph.nodes DiGraph.nodes_iter DiGraph.__iter__ DiGraph.edges DiGraph.edges_iter DiGraph.out_edges DiGraph.out_edges_iter DiGraph.in_edges DiGraph.in_edges_iter DiGraph.get_edge_data DiGraph.neighbors DiGraph.neighbors_iter DiGraph.__getitem__ DiGraph.successors DiGraph.successors_iter DiGraph.predecessors DiGraph.predecessors_iter DiGraph.adjacency_list DiGraph.adjacency_iter DiGraph.nbunch_iter Information about graph structure ================================= .. autosummary:: :toctree: generated/ DiGraph.has_node DiGraph.__contains__ DiGraph.has_edge DiGraph.order DiGraph.number_of_nodes DiGraph.__len__ DiGraph.degree DiGraph.degree_iter DiGraph.in_degree DiGraph.in_degree_iter DiGraph.out_degree DiGraph.out_degree_iter DiGraph.size DiGraph.number_of_edges DiGraph.nodes_with_selfloops DiGraph.selfloop_edges DiGraph.number_of_selfloops Making copies and subgraphs =========================== .. 
autosummary:: :toctree: generated/ DiGraph.copy DiGraph.to_undirected DiGraph.to_directed DiGraph.subgraph DiGraph.reverse networkx-1.8.1/doc/source/reference/api_1.4.rst0000664000175000017500000000202412177456333021163 0ustar aricaric00000000000000********************************* Version 1.4 notes and API changes ********************************* We have made some API changes, detailed below, to add clarity. This page reflects changes from networkx-1.3 to networkx-1.4. For changes from earlier versions to networkx-1.0 see :doc:`Version 1.0 API changes `. Please send comments and questions to the networkx-discuss mailing list: http://groups.google.com/group/networkx-discuss . Algorithms changed ================== Shortest path ------------- astar_path(), astar_path_length(), shortest_path(), shortest_path_length(), ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ bidirectional_shortest_path(), dijkstra_path(), dijkstra_path_length(), ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ bidirectional_dijkstra() ^^^^^^^^^^^^^^^^^^^^^^^^ These algorithms now raise an exception when a source and a target are specified and no path exist between these two nodes. The exception is a NetworkXNoPath exception. networkx-1.8.1/doc/source/reference/algorithms.rich_club.rst0000664000175000017500000000021712177456333024134 0ustar aricaric00000000000000********* Rich Club ********* .. automodule:: networkx.algorithms.richclub .. autosummary:: :toctree: generated/ rich_club_coefficient networkx-1.8.1/doc/source/reference/api_0.99.rst0000664000175000017500000002032512177456333021264 0ustar aricaric00000000000000************************ Version 0.99 API changes ************************ The version networkx-0.99 is the penultimate release before networkx-1.0. We have bumped the version from 0.37 to 0.99 to indicate (in our unusual version number scheme) that this is a major change to NetworkX. We have made some significant changes, detailed below, to NetworkX to improve performance, functionality, and clarity. Version 0.99 requires Python 2.4 or greater. Please send comments and questions to the networkx-discuss mailing list. http://groups.google.com/group/networkx-discuss Changes in base classes ======================= The most significant changes are in the graph classes. We have redesigned the Graph() and DiGraph() classes to optionally allow edge data. This change allows Graph and DiGraph to naturally represent weighted graphs and to hold arbitrary information on edges. - Both Graph and DiGraph take an optional argument weighted=True|False. When weighted=True the graph is assumed to have numeric edge data (with default 1). The Graph and DiGraph classes in earlier versions used the Python None as data (which is still allowed as edge data). - The Graph and DiGraph classes now allow self loops. - The XGraph and XDiGraph classes are removed and replaced with MultiGraph and MultiDiGraph. MultiGraph and MultiDiGraph optionally allow parallel (multiple) edges between two nodes. 
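For example, a minimal sketch of the parallel-edge behaviour (written with the post-0.99 class names; the node labels are arbitrary):

>>> import networkx as nx
>>> G = nx.MultiGraph()
>>> G.add_edge('a', 'b')
>>> G.add_edge('a', 'b')             # a second, parallel edge between the same pair
>>> G.number_of_edges('a', 'b')
2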
The mapping from old to new classes is as follows:: - Graph -> Graph (self loops allowed now, default edge data is 1) - DiGraph -> DiGraph (self loops allowed now, default edge data is 1) - XGraph(multiedges=False) -> Graph - XGraph(multiedges=True) -> MultiGraph - XDiGraph(multiedges=False) -> DiGraph - XDiGraph(multiedges=True) -> MultiDiGraph Methods changed --------------- edges() ^^^^^^^ New keyword data=True|False keyword determines whether to return two-tuples (u,v) (False) or three-tuples (u,v,d) (True) delete_node() ^^^^^^^^^^^^^ The preferred name is now remove_node(). delete_nodes_from() ^^^^^^^^^^^^^^^^^^^ No longer raises an exception on an attempt to delete a node not in the graph. The preferred name is now remove_nodes_from(). delete_edge() ^^^^^^^^^^^^^^ Now raises an exception on an attempt to delete an edge not in the graph. The preferred name is now remove_edge(). delete_edges_from() ^^^^^^^^^^^^^^^^^^^ The preferred name is now remove_edges_from(). add_edge() ^^^^^^^^^^ The add_edge() method no longer accepts an edge tuple (u,v) directly. The tuple must be unpacked into individual nodes. >>> import networkx as nx >>> u='a' >>> v='b' >>> e=(u,v) >>> G=nx.Graph() Old >>> # G.add_edge((u,v)) # or G.add_edge(e) New >>> G.add_edge(*e) # or G.add_edge(*(u,v)) The * operator unpacks the edge tuple in the argument list. Add edge now has a data keyword parameter for setting the default (data=1) edge data. >>> # G.add_edge('a','b','foo') # add edge with string "foo" as data >>> # G.add_edge(1,2,5.0) # add edge with float 5 as data add_edges_from() ^^^^^^^^^^^^^^^^ Now can take list or iterator of either 2-tuples (u,v), 3-tuples (u,v,data) or a mix of both. Now has data keyword parameter (default 1) for setting the edge data for any edge in the edge list that is a 2-tuple. has_edge() ^^^^^^^^^^ The has_edge() method no longer accepts an edge tuple (u,v) directly. The tuple must be unpacked into individual nodes. Old: >>> # G.has_edge((u,v)) # or has_edge(e) New: >>> G.has_edge(*e) # or has_edge(*(u,v)) True The * operator unpacks the edge tuple in the argument list. get_edge() ^^^^^^^^^^ Now has the keyword argument "default" to specify what value to return if no edge is found. If not specified an exception is raised if no edge is found. The fastest way to get edge data for edge (u,v) is to use G[u][v] instead of G.get_edge(u,v) degree_iter() ^^^^^^^^^^^^^ The degree_iter method now returns an iterator over pairs of (node, degree). This was the previous behavior of degree_iter(with_labels=true) Also there is a new keyword weighted=False|True for weighted degree. subgraph() ^^^^^^^^^^ The argument inplace=False|True has been replaced with copy=True|False. Subgraph no longer takes create_using keyword. To change the graph type either make a copy of the graph first and then change type or change type and make a subgraph. E.g. >>> G=nx.path_graph(5) >>> H=nx.DiGraph(G.subgraph([0,1])) # digraph of copy of induced subgraph __getitem__() ^^^^^^^^^^^^^ Getting node neighbors from the graph with G[v] now returns a dictionary. >>> G=nx.path_graph(5) >>> # G[0] # {1: 1} To get a list of neighbors you can either use the keys of that dictionary or use >>> G.neighbors(0) [1] This change allows algorithms to use the underlying dict-of-dict representation through G[v] for substantial performance gains. Warning: The returned dictionary should not be modified as it may corrupt the graph data structure. Make a copy G[v].copy() if you wish to modify the dict. 
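As a small sketch of this access pattern (the data lookup itself is shown commented, following the convention above, because the stored value is the edge data 1 under 0.99 but an attribute dictionary in later releases):

>>> import networkx as nx
>>> G = nx.path_graph(5)
>>> 1 in G[0]        # neighbor test against the returned dictionary
True
>>> # G[0][1]        # fast lookup of the data for edge (0,1)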
Methods removed --------------- info() ^^^^^^ now a function >>> G=nx.Graph(name='test me') >>> nx.info(G) Name: test me Type: Graph Number of nodes: 0 Number of edges: 0 node_boundary() ^^^^^^^^^^^^^^^ now a function edge_boundary() ^^^^^^^^^^^^^^^ now a function is_directed() ^^^^^^^^^^^^^ use the directed attribute >>> G=nx.DiGraph() >>> # G.directed # True G.out_edges() ^^^^^^^^^^^^^ use G.edges() G.in_edges() ^^^^^^^^^^^^ use >>> G=nx.DiGraph() >>> R=G.reverse() >>> R.edges() [] or >>> [(v,u) for (u,v) in G.edges()] [] Methods added ------------- adjacency_list() ^^^^^^^^^^^^^^^^ Returns a list-of-lists adjacency list representation of the graph. adjacency_iter() ^^^^^^^^^^^^^^^^ Returns an iterator of (node, adjacency_dict[node]) over all nodes in the graph. Intended for fast access to the internal data structure for use in internal algorithms. Other possible incompatibilities with existing code =================================================== Imports ------- Some of the code modules were moved into subdirectories. Import statements such as:: import networkx.centrality from networkx.centrality import * may no longer work (including that example). Use either >>> import networkx # e.g. centrality functions available as networkx.fcn() or >>> from networkx import * # e.g. centrality functions available as fcn() Self-loops ---------- For Graph and DiGraph self loops are now allowed. This might affect code or algorithms that add self loops which were intended to be ignored. Use the methods - nodes_with_selfloops() - selfloop_edges() - number_of_selfloops() to discover any self loops. Copy ---- Copies of NetworkX graphs including using the copy() method now return complete copies of the graph. This means that all connection information is copied--subsequent changes in the copy do not change the old graph. But node keys and edge data in the original and copy graphs are pointers to the same data. prepare_nbunch -------------- Used internally - now called nbunch_iter and returns an iterator. Converting your old code to Version 0.99 ======================================== Mostly you can just run the code and python will raise an exception for features that changed. Common places for changes are - Converting XGraph() to either Graph or MultiGraph - Converting XGraph.edges() to Graph.edges(data=True) - Switching some rarely used methods to attributes (e.g. directed) or to functions (e.g. node_boundary) - If you relied on the old default edge data being None, you will have to account for it now being 1. You may also want to look through your code for places which could improve speed or readability. The iterators are helpful with large graphs and getting edge data via G[u][v] is quite fast. You may also want to change G.neighbors(n) to G[n] which returns the dict keyed by neighbor nodes to the edge data. It is faster for many purposes but does not work well when you are changing the graph. networkx-1.8.1/doc/source/reference/readwrite.edgelist.rst0000664000175000017500000000035212177456333023617 0ustar aricaric00000000000000 Edge List ========= .. automodule:: networkx.readwrite.edgelist .. autosummary:: :toctree: generated/ read_edgelist write_edgelist read_weighted_edgelist write_weighted_edgelist generate_edgelist parse_edgelist networkx-1.8.1/doc/source/reference/pdf_reference.rst0000664000175000017500000000046512177456333022626 0ustar aricaric00000000000000.. _pdf_reference: Reference ********* :Release: |release| :Date: |today| .. 
toctree:: :maxdepth: 2 ../overview introduction classes algorithms functions generators linalg convert readwrite drawing exceptions utils legal citing credits glossary networkx-1.8.1/doc/source/reference/history.rst0000664000175000017500000000030612177456333021532 0ustar aricaric00000000000000History ******* Original Creators:: Aric Hagberg, hagberg@lanl.gov Pieter Swart, swart@lanl.gov Dan Schult, dschult@colgate.edu .. toctree:: :maxdepth: 2 api_changes news networkx-1.8.1/doc/source/reference/classes.rst0000664000175000017500000000155212177456333021472 0ustar aricaric00000000000000.. _classes: *********** Graph types *********** NetworkX provides data structures and methods for storing graphs. All NetworkX graph classes allow (hashable) Python objects as nodes, and any Python object can be assigned as an edge attribute. The choice of graph class depends on the structure of the graph you want to represent. Which graph class should I use? =============================== =================== ======================== Graph Type NetworkX Class =================== ======================== Undirected Simple Graph Directed Simple DiGraph With Self-loops Graph, DiGraph With Parallel edges MultiGraph, MultiDiGraph =================== ======================== Basic graph types ================= .. toctree:: :maxdepth: 2 classes.graph classes.digraph classes.multigraph classes.multidigraph networkx-1.8.1/doc/source/reference/algorithms.hierarchy.rst0000664000175000017500000000021112177456333024152 0ustar aricaric00000000000000********* Hierarchy ********* .. automodule:: networkx.algorithms.hierarchy .. autosummary:: :toctree: generated/ flow_hierarchy networkx-1.8.1/doc/source/reference/api_1.5.rst0000664000175000017500000000333612177456333021173 0ustar aricaric00000000000000********************************* Version 1.5 notes and API changes ********************************* This page reflects API changes from networkx-1.4 to networkx-1.5. Please send comments and questions to the networkx-discuss mailing list: http://groups.google.com/group/networkx-discuss . Weighted graph algorithms ------------------------- Many 'weighted' graph algorithms now take an optional parameter to specify which edge attribute should be used for the weight (default='weight') (:ticket:`509`). In some cases the parameter name was changed from weighted_edges, or weighted, to weight. Here is how to specify which edge attribute will be used in the algorithms: - Use weight=None to consider all weights equally (unweighted case) - Use weight=True or weight='weight' to use the 'weight' edge attribute - Use weight='other' to use the 'other' edge attribute Algorithms affected are: betweenness_centrality, closeness_centrality, edge_betweenness_centrality, betweenness_centrality_subset, edge_betweenness_centrality_subset, betweenness_centrality_source, load, closeness_vitality, weiner_index, spectral_bipartivity, current_flow_betweenness_centrality, edge_current_flow_betweenness_centrality, current_flow_betweenness_centrality_subset, edge_current_flow_betweenness_centrality_subset, laplacian, normalized_laplacian, adj_matrix, adjacency_spectrum, shortest_path, shortest_path_length, average_shortest_path_length, single_source_dijkstra_path_basic, astar_path, astar_path_length Random geometric graph ---------------------- The random geometric graph generator has been simplified. It no longer supports the create_using, repel, or verbose parameters. An optional pos keyword was added to allow specification of node positions.
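For instance, a minimal sketch of the new keyword (the coordinates are arbitrary, chosen so that only the first two nodes fall within the connection radius):

>>> import networkx as nx
>>> pos = {0: (0.10, 0.20), 1: (0.15, 0.25), 2: (0.90, 0.90)}
>>> G = nx.random_geometric_graph(3, 0.2, pos=pos)
>>> sorted(G.edges())
[(0, 1)]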
networkx-1.8.1/doc/source/reference/readwrite.adjlist.rst0000664000175000017500000000027312177456333023453 0ustar aricaric00000000000000 Adjacency List ============== .. automodule:: networkx.readwrite.adjlist .. autosummary:: :toctree: generated/ read_adjlist write_adjlist parse_adjlist generate_adjlist networkx-1.8.1/doc/source/overview.rst0000664000175000017500000000643712177456333017754 0ustar aricaric00000000000000.. -*- coding: utf-8 -*- Overview ======== NetworkX is a Python language software package for the creation, manipulation, and study of the structure, dynamics, and function of complex networks. With NetworkX you can load and store networks in standard and nonstandard data formats, generate many types of random and classic networks, analyze network structure, build network models, design new network algorithms, draw networks, and much more. Who uses NetworkX? ------------------ The potential audience for NetworkX includes mathematicians, physicists, biologists, computer scientists, and social scientists. Good reviews of the state-of-the-art in the science of complex networks are presented in Albert and Barabási [BA02]_, Newman [Newman03]_, and Dorogovtsev and Mendes [DM03]_. See also the classic texts [Bollobas01]_, [Diestel97]_ and [West01]_ for graph theoretic results and terminology. For basic graph algorithms, we recommend the texts of Sedgewick, e.g. [Sedgewick01]_ and [Sedgewick02]_ and the survey of Brandes and Erlebach [BE05]_. Goals ----- NetworkX is intended to provide - tools for the study the structure and dynamics of social, biological, and infrastructure networks, - a standard programming interface and graph implementation that is suitable for many applications, - a rapid development environment for collaborative, multidisciplinary projects, - an interface to existing numerical algorithms and code written in C, C++, and FORTRAN, - the ability to painlessly slurp in large nonstandard data sets. The Python programming language ------------------------------- Python is a powerful programming language that allows simple and flexible representations of networks, and clear and concise expressions of network algorithms (and other algorithms too). Python has a vibrant and growing ecosystem of packages that NetworkX uses to provide more features such as numerical linear algebra and drawing. In addition Python is also an excellent "glue" language for putting together pieces of software from other languages which allows reuse of legacy code and engineering of high-performance algorithms [Langtangen04]_. Equally important, Python is free, well-supported, and a joy to use. In order to make the most out of NetworkX you will want to know how to write basic programs in Python. Among the many guides to Python, we recommend the documentation at http://www.python.org and the text by Alex Martelli [Martelli03]_. Free software ------------- NetworkX is free software; you can redistribute it and/or modify it under the terms of the :doc:`BSD License `. We welcome contributions from the community. Information on NetworkX development is found at the NetworkX Developer Zone at Github https://github.com/networkx/networkx History ------- NetworkX was born in May 2002. The original version was designed and written by Aric Hagberg, Dan Schult, and Pieter Swart in 2002 and 2003. The first public release was in April 2005. Many people have contributed to the success of NetworkX. Some of the contributors are listed in the :doc:`credits. 
` What Next ^^^^^^^^^ - :doc:`A Brief Tour ` - :doc:`Installing ` - :doc:`Reference ` - :doc:`Examples ` networkx-1.8.1/doc/source/developer/0000775000175000017500000000000012177457361017331 5ustar aricaric00000000000000networkx-1.8.1/doc/source/developer/gitwash/0000775000175000017500000000000012177457361020777 5ustar aricaric00000000000000networkx-1.8.1/doc/source/developer/gitwash/forking_hell.rst0000664000175000017500000000222712177456333024175 0ustar aricaric00000000000000.. _forking: ====================================================== Making your own copy (fork) of networkx ====================================================== You need to do this only once. The instructions here are very similar to the instructions at http://help.github.com/forking/ |emdash| please see that page for more detail. We're repeating some of it here just to give the specifics for the `networkx`_ project, and to suggest some default names. Set up and configure a github account ===================================== If you don't have a github account, go to the github page, and make one. You then need to configure your account to allow write access |emdash| see the ``Generating SSH keys`` help on `github help`_. Create your own forked copy of `networkx`_ ====================================================== #. Log into your github account. #. Go to the `networkx`_ github home at `networkx github`_. #. Click on the *fork* button: .. image:: forking_button.png Now, after a short pause and some 'Hardcore forking action', you should find yourself at the home page for your own forked copy of `networkx`_. .. include:: links.inc networkx-1.8.1/doc/source/developer/gitwash/forking_button.png0000664000175000017500000003144412177456333024543 0ustar aricaric00000000000000‰PNG  IHDR]Vl8EÀ pHYs  šœ IDATxí]|TÅöþv7›Ý´M‡„N]&„O ¨Ø ‚Xÿ‚(ú¤("‚}€`@z!Ò!@¨$†’ I6»ÉöÝÿ™{wÓHÅ ™ñ—½w§ÏwÎ|sæÌ,JlÀG€#ÀàÔÒi…7Âàp8œt¹"p8D€“n ‚Í›âp8œt¹p8D€“n ‚Í›âp8œt¹p8DÀ©ÛªSMY­Vܺu jµºN»:ëååH¥åÛ÷;ƇêЦš­£22“ð{ºwG(7oÞ#77·»ÓÀ}Z+»6žŸŸ/®ŸŸ_¹£¼Ÿ1æ8”+ú{2±²2+ß”¸'‡öÏèTnn.\\\þ½‡z)‘HÜ~…ûcŽCEÒ¿÷Ò++3î^¸K²³X,`B`«UC€áÆð«(Üïs*Ò€{/½22ã–î½'7Þ#ŽGà>F€“î},\>´ÒÈËË믾ŠåË— »‘ÒsÝ¿±/^ûã¡vàî…»„;s+p׃[ìîcFºÌ5ÁnGÈd2˜Íæ{VVÕÙ3g°hÑ"A0“'OF»víîٱ߹öÔnÉŠdÆ-ÝÚ•Ðzzz:¶mÛ†9sæì=33³Âžé¯¬C¿~ý0~U¡Õreõ$!nõm…åï,ƒWŽìÀŽ#q(ÏëšgïÛÝëÇõžÝŒX¶l™P844´\K7nÝ'– ã¢USÖ½Ûý__­äÎú\]¥ ÆÎê\²d 222ÊÅ@h[Ÿô/ŽA¿aã±îdrù]3\Ä»TnüJ¦Ÿ„ÃëýÑoüª8äaÓ»%ê&¼‡¾þ5ÎÜ2—_¤VNß«ÒnéV­ªä­„¥«Ñh°aÃ9rþþþ0`€0vïÞM›6 ýé§Ÿ.óÚ™Ù¦z$“ºÂ])‡Fg,°Z¤n.P:Ù 3U¥Ó•È+1áô7s°Bòõo ›±ô‰a³÷MáåMý0V½•9€¬ÆEG¤Óéðí·ßâêÕ«3f úôéS`å–f ùBqŸ¶]ÑÞ_¼ú—w3þ4&k%ÇdƒAG»=ÝÊpS‚„„*­V999˜;w.ŒFc$Ìâÿæ›o0}út¸ººèNANj̈́›B§ãá!@ö<E~‚Ð=«ÑÖ¹Œå—úžGå ‚~ÊÄúõJ¨k6hubC¡}Ÿ@#ºð“S1»ðÞÔöØÿã@H­ŽŽÔ³’ú^¬gÈŒ“n1´jîË•+WŸ"› AAA˜5kؽS6ùû÷ïÏ>û |o¯½öZ¶lYö¤`Ý–Ü>•ö,À´u©hÖ Ø½ë¼õÀÄ™Ÿ¢}Æ&|²ð~4ÚÄ`æ{óa{h¦i‹½_}‚UêΘóå(dí]†Ù³ÃDÕ7ê4ã'¾ãî™X®fm­Àû Z`ÖÝ¿òÍóµzd Þš0Ô¤¶/Ÿðƒ{qÙ«!ÆLœ‰qý‚í)5û(J¸/¼ð‚°À1û+p‹öîÅ©Kð|K+¬ˆÉbÕ§žÆÓ>ƺ³9BÖc>ÃGãúÁS‡ïÞƒôfDNÇþD‹™?À](§€fì[<kÏÝÀƒc¦c\ÍÜÕwviÁ‚prr\*ì”6v†ÉTþêìJÙ%m‡ã¿³ŸƒÔbÅù%/á•åQ8—`„[ÜBLÝjÀ´o?@ eöýw26؆cÞ»EÆ'6Wê  ºáÓù_¢¥ÕB8'áÓŸÁ^m6@m¸²_Ïšƒð˜A‡'|òñ„%=s§}‰]±jô8HGýǦ`â£ÀwgÀð¯iøà©ÐÇíÀ»S¶bÈœy ¯8¹ê|µb7r$*ôù&¦Œ¾ætìøá+ÌÙ|NècçãðÎ;#pí—‹ëûøžP”:ŠÊGJ+Ÿ•ç¬.™2«ƒ. ƒ÷½÷ÞÔ)S••…'Ÿ|RHcï_}õŽ=Zñ6P(Qø¡K¿‚ؘØUïN€œäø|úN¸6®Ø¤l޼Eäqb’ñçÆsÐ!»öœE²S0nlÇ"\íCo૯Bvn>ýlB{áo6IBѽs#˜Î-ÆF¸!#ðæØ¾ˆ=° ï̇D*βăÉèÿþ84ÉIÆêÏ!ÁVóë<³è.#\¶¨iµZÁÊe?`©(l˜÷ ¦6 Ó>üÎÙŽ›š«˜;ä pOøoʼnUŸãå— %/ÇÄàø®Í8. 
¬ýµÛíHKKCVVVuèN¹ûP£F øûûC./y>çx•bpŒËÝßU’óìVäeÕáÿÍ›7Á”™——×­=¼ObØëyyy‚Ò(±×¯á)6‘c\,4÷lçYѬ)Ù4,ºÌ=› {Ž®Ê$H&“ 0,n8^·C¨ètŽqѸÜ˱œgEs§Z¸zl6ƒÙì~?†Ãâvãu;„ŠOçͽšÂyv+gª…Åk·x G€#Àà‡À}¯ø÷ìÙƒ©S§âÔ©Sª¡8 xyò$¾ûî;¶wß}­Zµ*—ø¸.6.O[ÑB$É´ÜÓ r)›ÉÄ»›BñúèÕ? Z}|?¢·ÌìÐøê©à¢qu'ò.=ËIXHéG.AŸÖ0[ŠäráÖïgcÇŽN,Y²Dx_è³Ï>ãŸkðññøÆ®£GÆ ËÏŒÌüàù¡SÿNð3^Ƕ½QXöÙ+PmÀˆæÞùY+ô†úïÊëê¡øË³\ÙK_,?~\ø°û‘#GB¡PN$,þÌÆÕì"†¸EØŸø úùAÉÇ~ǧ³¾GLVs<ÒYʘ¯ìÀÄ™›àR7ï†>øŒÙ'—,Æîo<5ú#¼3( fèa0¨`¡9—·ÍÃÌUI ¶l>‚õº`ÂŒOлaå½£páÂ0e‚Œ™3g‚ýgxôéÓŸ~ú©°Zb>é1cÆ iÓ¦E*(O§¥_€ˆË)óÞ…ä:á8·G|ÐåÙ ˜<Ì ó'þêg>Ää!AŸ>; Ýñù¬Ðoœ‡©áÉxýËéMÛ9³gáH¼¾Áí0tÌÛèaÙ‚.I FøpìÌŸ3W6bά¯Ä|Íû`ü„ñx´Àh‹üs &îÞ‡ýĶ?×ÞEëKÂ+î–½X4þ|aµÉdŽ#,0œÙÉ+‹åöVC‹ÁSðùȦ°Ù³ñÓоøþÚÄÛÿqÿ‡?ð$¾ú`¼ —0oÂt˜žœŠñ$WR spÒ-ÍÐ.÷Ëð‘µй³þv›Žøõxè3‘šk§‡Ñ;~ÂŒO– –òÖëú>ú`4Â|•H>ŽOg, ±ŒO5Eâ… ô›4}±ïLZ'æÌÅ &^ø‹xùÙz`ê¼ h‚«Xñål,ÙCãÙÉbM8Ö´³øvæ—XIÅù:Æ?ÿ–½? ‚ öã$̯?c{Õ+èÇ]º“ŒD•J___Lš4 ÿú׿hÜ„5 &Ož ö ËË^¢d Ÿåg|tUþ"¶¯â?Ÿ¿5ñyâÉŸ0pÜwXº&£¦<ŒÄÓ[ñ¯?G›W¨ßÃirToV|ô)ö{<†¯¦Á•å3ðïmÀÛßLA»Œmxsâ <ôág>ü6'Õ@Ù9l>œ€à.#0}ÚH4tÅ…x-w}¾_î™Ò_·nÀ Æ ºuë¢~ýú1búöí{‹Ò‡õÖ®#…Ò`4–|ó†ÓêÍÑÐ(è6yžÿà{Äf¶ÀðÑmñ×^g°“][lè?¼2ãwaÎgs xøqt”'`ýœoqѪ‚!!§îC¦McÊÄÆÁ–¨ZxçþÈL8‚Ϧm‚E)*©î»ueî/f…JÖÌàÁƒ¥ÿÞ{ï žžžŽÇ{LhžÝÏž=ÌWZ¥§Ë€ó±8¼y3ÿñ6ž Αðéø3ÝŸ”L¶¬‚Ù‡?wÇ öè$ZM¸°s¼Ñ¬^ ¦ŒŸ…£ú¾˜þÕ tɉÀ’)Ÿ"­NKtoï+Щw+ÚNàµ7æÒo‡ÑG@³ŸŸ‡2§Ø_Û ¿>òÛÞtÅ‚Ê@™Yƒ¯½öš``0…À”<›\™"aJ‚)Ï¬Ê &ùŠÃ57íâ®"êÈ&¢IÐ! €¯R´s±ˆ=o€‡§°Ö±ñ8Ÿ)ƒ— ´ÅÕV8Þ‘xk÷ìÂÖmâÇe…ÄÐ:žäÎü #Hé§ôxÓ§Œ‡âp8Æ¿ú²Rh,¼·@ #Þ膣kw òŸ)Ó@åÈ¤É ™r-­„e0gœG|ì)Ø<ìXõáH,ÙŒ?™Žñýåø}ÖXr.»æLÀÚÓzŒý|¦<«Ãæ%Ÿ`ãU Â:·Ù\@‹‡Ñ¾Q <5•g¿JtPPÞxCÔŒvß°aÃ|Ý!R~–§p0ÁHÖ?[áiµB[&xÑI0fÁh’íÈÌ0Œ›2OÇcÅg¯ÔÖÆàâ®H¶çàÌÖ]H }r6Ñ€äÓ[“†Íë#=æ("ŽlATà“÷D ÄYŽi\„«úÝ܇ÊC¬p¯+ô‰Ù*ÐÅUÌÒÙ úæ›oŸí‘GÁ+¯¼’ÿÌfköq¡³NlÇaj¤A¯6 “!”ÔCÔ¢õ¸1jò"÷ e‡/øïtòÅ íôñJ!ŽšÂøÿ}‰W[^BäŠ8Õr<>o$b}"1b P³ÏddpyC#WÀ.”©Ë?E'ï\÷lÇ‚\´Zزó\m4±r·og“n±…Yžâðr·2®yÙ^ˆû» /&à욟×*ºS¤–Y|!ÚœàÔi¦è„ÜÖ&¬µrEž|¦v¯9‡#' ¢eçÈÀ™ˆH\Ž|‡=‰Æ0iö;Ø‘†øÓû@F%!¨ƒ_ËAÚm62bøKý ?þ ( #¾ùãºù`H».ˆÍÒ¢¦‡ƒ þ#æãÓ‘Ýõ`6Ö¿¾”¬fO‡¤ôŸ< ͨOˆÔzÒde Ÿ³ðð/m È„<@Π¶soÿΛXy³É’ÉÅÃîKŠcéî¡Ás30oáO›IºZõèM}5§ R”-AnÉJÀiz$äÊHlY3°"òç÷1jî^ì8‡ÎýƒÆA:¨ ëG߃î¶Z¨‘µE¬_EøÆrr¸•ŠV $ñ1Wéú€,PêßT?M†!ý1v\wø¶¨'Ÿÿ¦_w :þþ\½—db5n¨þÀ»íM°Q^&SZ š,™®h×® K˜nq7’„Îû¥m±P?€´ÈÃüÚ…Á3ïPBK2}’G“6I©jwêMFç:,ú`*üLÑŠ©³§à<¥†Žì-ð‰¼r$PjZaY Ï4çlþ/,y&qò-[$e]Ôuß¾}'AB¾ÒW«Õ‚Òg§yÃL&“ ¸˜•%1/¿®ÌHüÆÌ}ÿ§0—üÞ è3wòSJ¿­;ŽFÎc ß~1{NìÄ¿?\ê‚ 8ø\"à`‡}H`œãÒ5‰%zf40>Š•-‰|ÚŠy–Jw-©<ëË–-1}úttëÖ ñññ‚‹çðáÃØ¿?ØqÃ7niÌÍ#)}÷‰Ó¡¨ƒúd:Ò—á­¿âصøhÚ¤níá)öU8ûäì‡D¯_«NâÒ—"Âzö@—vMÅ$Y/ô ­Œkgß²ßCC0¨[mÄŸev= ÚLgÁ!KÄ©£±ÐÖ®#_<*Jz¦Pp•.– ÕËŒA"Ó©á)(5§K—Òñ@Ë0±Mm´ëÐ×þŽÅ?_@­‡ºñ‹æ~‡}'v £¶áçU8´k ¾ßÍ~Z„(ò®¾t—n@–ÐPuáß/Áù›7ðí 1øfÃM |ó_˜7®£P>1Ý/…°fÀù³G›n, »˜ñ õK¨ „/)_QW©KcAÊÃt³òÙ‡ÝKñÅå—°EÔVÌûþG,]0Ã>\!dùD6±]4}.öœ>Žå_ÌF¥¶ «MX5G¯fNY {Ý;·’šÁÀG›Àa1 ¢ÉZ*,×RæÀf-ª¿ÅÏN¡°súlff–*ÛÈe0ÌÒgJŸ1Œ)»âÂ#Ûà3½Ú6€fKG³¡èûÅzìܰ W&}Ž9£cÒâp¼ÿNA-ê‚[²ŠXP¢&ɹÌ[´‚¶¤u.lNwÎËÂ}áM=Ù]ÚègTÌšg¾h†Sú§OŸ„ºgÏž`&Ûôeùؤ) |áº4xnÎwH{ó ü±n>X'¦öŸ¸¯µõ†5Ý!¸È‹Q(0¬Mз³"d ]H001ß¿þýÐ\kƒ©a/<¼á[§`0->BƒkGà8.ÆТVU‹o߯[Â1㥓˜²l&¾IUÐFÙû˾B[ßËð"^°¶.®Wž"ê.=¼øâ‹øá‡„REÅIi®W¥ÒiM“b–¬6WW/ŒŸ§.Ç×ïMDýv¡T”~¹ÖkÉg ÷Ž;DY½(4(ª«såy#îjŽ™¯¤á“Ÿ¦â¹e”‰pðÝh´n£Ã—/Ä?ÿWx,°j¼[ ÀˆÐŸ±|ë\LÜŒ°údÅ1¹¯‹qßÌ@ê„)˜2j8ËŠ}'`ìã‘£|›§ü†‰Ãv ñ¨ÿ(Æ j™Æ:/Ø}ßâÅZaØ;¾,¢TÌw¿î(¶ÌíæpDá÷ÅQBU2Z‹}°d.†µ­s^_|÷~Þø÷j¼?^4í‡IÁaÌAç'Á¯v¡Wûø>‹®T×aÇSèâ ‹Iô)¸ê Ö–4Æ$^+ƒ0ÃËÍܹ£^ý=…™´NѺs§ ""]ºt–ÓÌg:eÊa3—Y«Lé»ûNÝ˳g¥‡/tZ9llvž4À”ðñÓAAslnf6L4‰ØÔ¾¬Á–²b°äf"^ðõ¢}ºÏ±¨áSà »™Y9P{ù“].ýœ´Ã£¼Õäž {µ'Å›írxÓ)ªŽ‘帬¤V _“’’ßcáØÂO%áU8§ècîæ“\Ld˜Ò—VHîe =S9SnLé¨H'?læ<èsi¥M?_Âæ\dåš!W{`Ež_hi=lÒ3=à§ÓÀa3"+›|º$ÐVr—yøÀ›üòÌW*'¶9' ¹VòijiQ«€ð4ši#Í*‡Ž,cò„=YÈ#^ßvÑ–’Ô¯;Åøõ×_—ª*Õõ—_~;^ëd*/ÔðVÃîÄÏ==«•¦m–§¸|,¾:ñ«Z(þÒ cóë—ÄàÒÖÅóq8¥@€,‹Óo]ŠÜÈÒâUÙôW…ö8ÆUK…iä<+Œ{ª?ûí öÿfïçÿ¹Ë˜É0`XÜ.p¼n‡Pñéãâ±¹WS8ÏnåLµ8ÎÉftv\+£à¨Â­=½büüü„c­ìNIãU:%§qŒKÆç^Lå<»•+ÕBñßÚ-Ãàp8Å!P²iX\)Ïàp8U®ø«,ë8áŽG |pÅ_>Üx)ŽG€#PeàŠ¿Ê²ŽÎàpʇWüå×âp8U®ø«,ë8áŽG 
|pÅ_>Üx)ŽG€#PeàŠ¿Ê²ŽÎàpʇWüå×âp8U®ø«,ë8áŽG |pÅ_>Üx)ŽG€#PeàŠ¿Ê²ŽÎàpʇWüå×âp8U®ø«,ë8áŽG |pÅ_>Üx)ŽG€#PeàŠ¿Ê²ŽÎàpʇWüå×âp8U®ø«,ë8áŽG |pÅ_>Üx)ŽG€#PeàŠ¿Ê²ŽÎàpʇWüå×âp8U®ø«,ë8áŽG |pÅ_>Üx)ŽG€#PeàŠ¿Ê²ŽÎàpʇ€òòåËå+ÉKq8Ž@•D@Y£F*I8'š#Ààpʇwõ”7^Š#ÀàTY”‡£ÊÏ çp8²#À-þ²cÆKp8*WüUš}œxŽG€#Pv¸«§ì˜ñŽG J#À-þ*Í>NP{@)#mé°qç57 Bå"hC<á›jE@ O>ŒÇ§ÏÆ"ÓªC€:¿Ÿ9ñш¸˜ /1g.Áæ%§O_…¦V´Îþä0$ãt¤3ŸÔÙüDé&ÿÜ S݆ðWT`3k–A-•sprëNdÕl€íßg³˜Ó.áÀÎ8x"g¢/!Eoƒ.0žƒ¥nUØÕ†+§áZ®jûU¼ŒW™¼¢*ƒ@¹\=¦ÄSXµþX‘¬Ûe(^ت¢Èlé0áTø:íø2¾hä“ÅVquSM¦Ô(lÜxµ:}†Æ0Á^¦Ús°uÑ*œiõ žóÔ 7ã Ö,ùI&g-51xìkèä¹/’Žþ‚Õõcxß2¶S&¢Ü2[qyïZìHòAÍ©¢‘Âö#É‘›°z[Pw¦¿Ù õœïß~Æ!} _lH2pmýÂ0Øq¬;WÛ¶q6—_1\=Dù"„|Ã+`-â'Bâ÷þ‚-Çü0é9Tyy0ÛÅ "™…u ñÑÔAЕj¡Î>€Ÿ—{`Æ„‡ápmÈ­Wwë1þÐX¼õ¬X½Z˜Mˆ9½0t´ö§¸Š$ãQ«6âh§@<ÔÜFsÅÊxE“Ëë»÷(ŸÙ¤mÒ®/}„ÿ|ù%fÓgæ”7еž7ŽüÿIAÝv÷APz¾^ðÔT¼­¬êTÃKëuÑÊ8½ÇL: Ú,{+N¬Z*(ýVƒÆàãw_BnâÏ+o¡ŠAxbPSĬþÉŠò¬-Ê‹³¡B‰_zÄeÊ úhÆ• qB…Žë—ªÐŠü´$㲞¢Cz¢eÇgðæø‰Úª´ZOŠ$Œ<< rc¼GÓû†˜ÏÃŚϧÖp ¿íLBËaÏ J»=‰Wc°ïÏeXG¿îðñ†Î ô­{ÚÔØô—±ReŒÑk¸ºUTú>íñÆg_bþúÌ›‹F>-ñråOG`‘V'ù¬˜%­°|<|à¡®x¯ y-U rYü’ѦòöÝf†Éd…LYǾÓÌy8¹™–ã€ÎdAZÌ>¬\µ If‚Eínƒ‡¡ÿƒA°$Ãâ_N¡Í³/£[°liQøyÅNxvyC;Sæì[¶×>ÞÚSX÷—ݰmç1ä˜d¨Óêq¼<ô!x1Ì>d4Ùvº:ŠmS`Œ- ‡þ܈½—É6tµ›¡çÀAx¨¡¯˜œƒ5¿nÀ¹¤èêµCÏfy9ØòÚA–¬7NnÃÊ­‡Î kjã‘aÏâá&5…²…¿R°nõhÛ¼‚6^6䦜Çh3|zc´‚Õ˜‹WÞ‚©ó×á)±0;ôBý‹±öHÆwÒÁäb9®»bŸj…4§ c›ˆuɰ¿ŽÓñhjÁtó/œ¿nF¿šè“®!‰ÔmH›¦PÝèÐH‹¤s›°ä`"änfVÔÚbÛl8µâl#¥ß Ç@ {º/|3c±éG²º™wÛ|‹¾þg“,hÙç42G`Ó®˜|lÌ7öaáúC°wÇÐaƒÑ\›Œ]T߉ 7"¨„9ñ®’ìÔ=ŒÜ$Fèãã‘Nñm:6‡Ý’‡ì!ócÊ)Ìÿv5.“ëè©gûBgNÈϧV¸ã”ƒ+çõPw€… yF+àѧÍÂæÏD_š‹ÉËáëÒäã ®˜ÈEæ–z×ÍqˆL’Á§ËhåI´fë¥/¶ç@“ã1û«ÐÊ[Ix˜˜}›¶!Ú⃺uëB•¼?m"ƒA÷ †‚®ÁVDìú{lÂÊÅ”yñçcÛ¡tôxúèbÁ¹]?céa6ÕJZI˜bvb×y-z=ÞÁ=¬\Œ£Šd­ôry×ðáW)Êiñ‹ˆhƒ‰Š_êµ_½:Àñ(\JNEÞæS€OgL™ò"VtjÑÚ¹³°k†uëI¾mvŸ½ûúH¼tU¨B–…e_h¯^¬šî¡µà¸&*ÁoÍÂãM6ìèhG§N>g WÃCaBžU²|A½žè× Ø_ ‹I+,Nˆ,°šœ˜É‰~Ê•F)2…\°ju~þ]8…kÖGP›´^AÎâwåˆÍÔˆŽ9›Ê‡‘u–Úm†Ðú¡ %ÅìÌO6B\tiâÖ­-‡%IT¹2ꃊÌr¶*ÒÇÆÜïÖC¯m·§½Ž&¤ o^ fù”2f¹»ì’˜“p™»F¥‚ƒ6r%ß¾œìÝÌtzf›¥dQ ’ÅOQ‡2™“ hZÛ£K•wV©“&™v¢Õ@t™aaø ²ÞiZ­†™Vµž~ Ýj‰œOëaødt’®íNƒC×}ð,üì˜ÚׯÅââå4<R›úÄdL‹§ßÿ'úÔSÃÔ¥9äs>Ç¡­Çaî÷,¬Jm{|øÅ˨CLN]3>^| évÔM.½\Þ5|xÅU wn’s¢I,ü®—!ð‘Τô-ÐgèIõ©Ñ<4öÅ Åþš·n€ÝÛ£È Ô±1f4èÒÉGOâJ¢~ÉÒöéŽF¾\¶©ÎVèDÊØœ“A®<Éç)¬€IaæR$1tX¦¤6{y Î_À¯?îÀ•¸x$§çÒ ¤;6¤\K€CÓ=;Û!k¶e×ÖØ–pMhBT—®1X9ë]l A³&-ÐqÄ4 ÖžÃ|Yj:/;›êõõ°· ©PÓüÍ‚bdÙ±IΛLMþöÜÊQjÔbƒÍ ‹=‡«)7‘M´7„@ÂÄÖ ˆ>‹™GÚ'´5jÑ4–êîa‘¥bùwáTm |Š”¾9™Ù 9î¶¡!mÊ»@ äwÜ+ð¨Û€VJû…h-7Œ¥iĽ’²>;ûëë©$ \¤V&×·¦/Ô>:¤Ÿ=C+:ŒjoxÒF‡™²´ëÙj«ÙèÚ<‹wk^¾•?áêµHˆO“˜@²”4ñF‡LÓ ëi`Ô§#Ï¢B•ƒ©‘j9¹Ótº“Ò·BŸN.1ÿ`x32hû×£›ÒÉ%eä#€;²ø~¢¥" [®_O"ó• ¢œ”©ƒÄÙF’‘‚˜G<3Âî¨Û´%°}3NDDâM CûõÇ%Rü‘'N!à¢=ÛÂÓn†…&hàEBϹƒ,G)L!¸X‚p(©å*®ÍT¬¢M¹3TTÐ-º¢í¶6S)#n¤éd¹)iHæ±v¨«ÁL \<©¡®ÓŸ¼× ÇFDäœØ™>¢ý³obp¨¿Hó;;é¦x'#%n£ºÈ‹Á‚J¡$̆‡9‰În°YŒÅÉi™íoP.á™e«ŒàÓ 1Y¦çpr÷>Ü̦ oHê¼ ^B¡sœÃᇑ@„´jßr«¹Ðé)’ßl¼Ž+k–àl›7QWèSæg]ú-uÈÙ9°[¬…ÓˆÇÁ‰…á Õ‘ó¾°üIîÂU.d+)ö/«'Ъ j‡—Fwƒ—¶«›ñþüí$@20–HŸ•V~$‹ÔÏË›þ‹'Ò)^‡æ­ÂЫI-lß}V ”z'ü‰&hÖ`åűÂbi…Ã.›¸2b¸Ñ JÚ+Wõ"¹lR*¹dÕðÀ ›f$Heº bÉ”)D—²ú‹{±å2E=Ð uä ù¼yS/ ,ÖoÂeÚ@tø6G‘† ¥Úý+·ÑFk 4©UÍZë°{-"i·kÛÈÇK¾pq ‚ÏŽ u9yG; býôí ÁYR›µ²ÿ”~«¦àßS&Òæð@tmļìÔ€ÂMBHygŸC¼‘ju¶“/.£Yž„£kñ¿íÉèòس˜4cæ|:(>*Ž6mµŠBÖjÚ&¨lèM4X©.m­º‚…v6‚\[lSœ)ùªà÷oÙ"ˆjûeÒg±ÙGì“Ô×ʸú4@¨¸|ä²d:tjH.*š½‚ÑŠ Цx=Å·j ù4$eÏ%òˆz FNþÿ÷8ÑŸ‚EágD׎ÐñKê·( Ô_§kìÒ¥šÈ©F÷~JeYý.iÖÌ7!Ù%Þ5O…ß«ê¡cmò'lŶ¿ò„N³6ìæ¡ñÜ”‚lö$ôÉŠfD9Âh¬Pº3°;©oÒæÿõÃkJ-—RY~uê‘Ê’Ÿ{¬?‰$Z¥¿žÜ¶;÷ìÀöíÛ±yÕøú×$Â:¼úÚ£¤ê¢K;àÌrü¸;zCbw¯ÄdèÒ >v¬v„†Q š6at܆šÍBDÈ—Ü*XM¾ÓTcKþVÔG×ÚôÓªˆ. 
>ö,âoü…“;~Ág¿ž&MÖ~¼»ƒ)­¥?lFRv®_ðÓä;r›1ñgÖbKÔ5$’_5::×É«àEËuZs»…ZG)&rЊMñn‚.Á@êðgTÒ"ðËÒT¢:5òÍ,öÓJV¶C‚`—ÁîRí]¼õEhi~ 2ß¶hìk'Ÿ5£É!Íh߆Åk[¢‰¿&„¢xBû4äúvÀÖ>EÿMW©ïBÉb¾<ìÔiîø±R+ÒUªÅ–—)ÜÖ$K»ò‚ì'ÈÏ®…3±þp­S‘ðÿÎÁ’ëbƒŒö7Ø\ž‡Ø8’›ˆýX0ý: ©Ž<;+Kò#;‡Yóv#^ð;¿û7ÁˆòéÒ†ÜZtˆBZáш՘Xj¹,¢8º(§«G0S³[ ½ÒîŒgèbI©äÒÙ ¿ÜçÈâââ¤ñT&(dt ¦¦îV‹Ën³ OŸ £tÂ…NA£Fòá*½üQÇÏ“ü¹tj#+Ç)ÌäeÑÐ&¨Ž âLdȾSxÀßÏ 2K.Ò² ]?tj ÒÓôÎrrèjúCmÑ#-Ûâ¼Ï¡{Ú.±M9d¤Ô,deù8_³7æ¤Ã,÷‚mÄfÜÌ$ëŠÒi3Oã(lÎÚI ÉIñëÓÒɆ×л ¹‚5¬$ºëÐÏP‡™E§R C˜°g1~Üï‰wf½ ß)kžVºzÖƒŸÆÏ¥7csb7à?¿ŸÆ³Sÿƒn>yHËÌ«TÅÏèSh‰ÞjØé$R:;oî ¿.<‘Ѫ¦¦fÚŒÔ;¼è^EaÉpiàWSGJ›6#³íÐÑÏ>°|Ùn±¶ôã˜õÝVô7•Þ Õ„Ô p•ÃÇŸxL{.é´¡)ÀkKÀ³~„bÐ$|ðèÈ&ž°ÔÊ rµ'ísÐÆ;% 'É*û© ;mÊæA¥ó†õÓ¬‚?õßêÒgš…¬ÆÓ~„›Ý’ƒÌ<|}=pé)øþLW|>}LI©°R½T/ù’‘M}t•wI–4mP{¥•ËÊÄŠ·uo" »víZ¥ …’,7:tï`J×ê¦!ï6%µ)§ E¶ g³²e4é)ZŽËÙF,{  £MX%£×FtÎ8W2•Ô¹ÐõÇýˆ‹3§- ¿Î^ y/·òpQP2ZúSyÊf·°Rº¡ŸCØ1.Ž6ŽÿŒêˆ¼Œä²Ùá>ÖÍÁªøî˜öÏî´™^²l$þïÔཹo¡ž9‹Ü"ìlÌß$Crrã±#ª6’R $’3•’6§iXiEG’ â•­_cYd;Ìš=Ú“3×!Õk-N¶Šér©ä²˜²<úþAàŽ|üeÉfµ•L't*Ié3ÚJjÓN ßB«I§;È…!)}V–)|‹…ÎÜKX¤K°æ÷§¥Ïò*êà‰ºãú‰(ȽE׈X¹. †‡´0²eÄ Ê‚×_ìHÇ9rî¥ÏðLoÿÏ"Æ@–´P1ß9ˆŽÊDç—^@}­$rÿ>¥ÏdFƒ #¥Uú¬É™PFPúB„xêKI+\;ÑÅ&{’&{eTú¬¶RÉ%ËÈÃ}@¥Yü÷3ÊrrŸøÓ)¦|wE1`Èhð׬¡&—RF¥ý\C1¤T~4¹†jÔô†%3&½’lgzéÍß²¼ dÑQÉê´ºšäÎ4#-ßYzÇûr/! »zõjI£ì^¢•ÓÂàp8€@É+ë h€WÁàp8÷å:ÎyouSÃàp8eA€[üeA‹çåp8ÕnñW&ò.p8² À-þ² Åór8j€WüÕ€‰¼ ŽG ,pWOYÐây9Ž@5@€[üÕ€‰¼ ŽG ,ü?‡V#ú9}nýIEND®B`‚networkx-1.8.1/doc/source/developer/gitwash/branch_dropdown.png0000664000175000017500000003766712177456333024677 0ustar aricaric00000000000000‰PNG  IHDR“’€7'pîiCCPICC Profilex…TÏkAþ6n©Ð"Zk²x"IY«hEÔ6ýbk Û¶Ed3IÖn6ëî&µ¥ˆäâÑ*ÞEí¡ÿ€zðd/J…ZE(Þ«(b¡-ñÍnL¶¥êÀÎ~óÞ7ï}ovß rÒ4õ€ä ÇR¢il|BjüˆŽ¢ A4%UÛìN$Aƒsù{çØz[VÃ{ûw²w­šÒ¶š„ý@àGšÙ*°ïq Yˆ<ß¡)ÇtßãØòì9NyxÁµ+=ÄY"|@5-ÎM¸SÍ%Ó@ƒH8”õqR>œ×‹”×infÆÈ½O¦»Ìî«b¡œNö½ô~N³Þ>Â! ­?F¸žõŒÕ?âaá¤æÄ†=5ôø`·©ø5Â_M'¢TqÙ. ñ˜®ýVòJ‚p8Êda€sZHO×Lnøº‡}&ׯâwVQáygÞÔÝïEÚ¯0  š HPEa˜°P@†<14²r?#«“{2u$j»tbD±A{6Ü=·Q¤Ý<þ("q”Cµ’üAþ*¯ÉOåyùË\°ØV÷”­›šºòà;Å噹×ÓÈãsM^|•Ôv“WG–¬yz¼šì?ìW—1æ‚5Äs°ûñ-_•Ì—)ŒÅãUóêK„uZ17ߟl;=â.Ï.µÖs­‰‹7V›—gýjHû“æUùO^õñügÍÄcâ)1&vŠç!‰—Å.ñ’ØK« â`mÇ•†)Òm‘ú$Õ``š¼õ/]?[x½F õQ”ÌÒT‰÷Â*d4¹oúÛÇüä÷ŠçŸ(/làÈ™ºmSqï¡e¥ns®¿Ñ}ð¶nk£~8üX<«­R5Ÿ ¼v‡zè)˜Ó––Í9R‡,Ÿ“ºéÊbRÌPÛCRR×%×eK³™UbévØ™Ón¡9B÷ħJe“ú¯ñ°ý°Rùù¬RÙ~NÖ—úoÀ¼ýEÀx‹‰ pHYs  šœ IDATxíœEöÇiÉ9'I‚dADOÌ'æžž ¹¿zê9Ü™ã!DÁœ# I0P²’3KfÉþë[l M;Ó3³ÌìÎì¼÷ùìvOw…W¿ª~©ª«‹üaH¢IvîÜiÿ¶oß.UªT‰’Co+Š€" dEcml‘"E„?%E@PEÀ@ÌÊ„Œx(182þ:ô·" (Š@!G .eêò¡ÍSE ĬLðHP$ê™äeÍ¢(Š@!G fe(’bÅŠrH´yŠ€" (ñ"—2¡ð={öÄ[‡¦WE@(äĬLÜj®¢EcÎRÈ¡Óæ)Š€" 8âÒ :ùî`Ó£" (Š€¸” s&æò§犀" ( ³2q«¸t^Ž" (Š€âþ A¿“æÚ¸q£dggKNNŽ.Aê½§(Š@Š!“2!´å‰óPÝŽ¹sçÊ®]»¤L™2R¹re)RT·nI4ÆZž" (ÉB &eâ*G‘8¥â®%âˆ7‚"©P¡‚+®ï±$S-CPüD .ecÉR&x$,;þcOÔMŒó­KPE bž€§¬d(Êݱc‡”(Q‚S%E@P4D feâI²æL\ùiˆ¡²¬(Š@Æ#W˜+™?%Åü ó,›6mʘ÷^–/_Þ~˜¬xñ¸º-ã¹ (ÉG n©”L…KsQ$ ,FIË–-3&<Æ—.W®\i畊+*{vëi±ŒM£(ùƒ@Ú)<Iýúõó¡©…9%Ú¼uëVÙ¶m›lß½=E8S6E@‰[™$ ´XÃ\„¶ðH2•J–,)[¶lÑ—:3uh»E æ xøUà'³­¼@™É+¿t;›dŽ.-[PòŠ@Ìž‰S$î˜× #åKV¹‘êK÷ëŠWº÷ ò¯.âòL WÓµ5Š€" (‰B fÏ„ YÉ•¬Õ\ºµ}|]ªxŇ—¦Vä"—2I&+ÉÛ°êiêÔ©²lÙ2©^½ºvØav#Éd¶#¿ÊN^(¨U«Vɺuë¤ZµjR£FüjNÔzh/;%dee%Í ‰ÊD”Û·o·K·ýszàÊÒnO¤9L š/úÕ;ž1XéçxìÝØðžûËð×Åý‚n¿ŸÇüø½bÅ +£½³¶~ýz!M¹r墲•2Ê$*§q&øúë¯å–[n‘Õ«W‡r²‘ä­·Þ*gžyfèšžìEà矖wß}×.;æáâlРœ{î¹R³f͇iÆ rÏ=÷È?ÿùO©U«–L™:Ejת¼9pî¾ûn«èw¼`êè·ß~“çž{Nxà˜„d~µeö÷ß¿Üwß}jd=ùä“‚póËà;tè ÇsLTãÕ·ß~»ÜxãR±bÅÐy:u¼EÚóÊ’%Kö»^ªT)©]»¶œvÚirÐAíw/èø˜¿`¾Œ>B®¿þzÙ½{·}æ¿øâ ™?¾mÚ´±ØÓv òwÞyG®½öÚ¨ý·2IÖ7བÉv ƒ«ÿþöŒ‹.ºHš6m*³gÏ–×^{Mn»í69øàƒ¥mÛ¶ZMæO$^ß~û­ >\Ž;î8ûðò.\¸PÞ|óMyöÙg妛n’Ò¥Kh{ÙôÒK/•&MšØ~þépéÞ½»U,©òãáüä“OäüóÏYÛŽ¿*UªØ]¢™_msc/”g¦ ©cÇŽ!#ï“_}õU+ÀºvíÈškG¥J•Bé8ÇÃq؇n˜"çœsNèßQ:t¨ë(×T§xÈo¾ñ¦\xá…R¶lY»“ÈK/½$x G}´=¾÷Þ{ö›R;w¶ÆН¾úJ¢õCÌÊÄîèkB <,©N(„â¿ÿýïFÅZœ3gŽ åÐ^ÄÚýî»ïËK|À€VHqŸðÚ0Ÿyæ.ÉÈ‘#åÅ_´‚÷ꫯ¶žÏUW]%Íš5“#Ž8B† "X¨Í›7—{ï½W~øá+ˆ»tébLÆ m9Xb¤›6mšµb±À°hó{é/|úé§râ‰'Ê_ÿúW‹ÉÚµkí@cÀ;Ö~€“‡îwß‘ùóæ[ ˱W¯^Ö Þ¼y³ 
<ØþþòË/-.íÚµ³ô­·Þ²oï׫WÏb€bŠ7=žÖ/¬âAÑ_ãÇ·á#°£_¿þækY0T­ZUN=õTiܸ±Å:?ÿp 2zôh騩£4jØ(lÕ<Ð(œ_ýÕâx衇ÊñÇoÓ¾üòËûµ1ˆgˆ¢‡^ýu;žÏ;ï<ûûûï¿—Å‹KŸ>}¬¢ŠÔ?|pï\ÆgÇ:BÕK¿ÿþ»ÐWŒUú6? ﯂ñH˜°uëÖ¶Ý<'ôý Aƒä’K.±ß:‚/ž+žs¼D](§@/Ýë.Æ ÔG$büa±/_¾ÜŽÉ-ZÈ”)SäòË/·™~ â7Zù‘Ƴ|Ä;öüñG+0¦ygoñ’Å‚çüàƒÚ°WζûìþùçV‚=ãáÿû_Te’2«¹°.bù õvÀ BáGG3Øè€yóæÉ?þñ;(yà¡~ýúÉ| kÖ¬±}Ö¬Y‚‚˜ðÅ{Ÿ=}útûÐÛ æs \Ãr‡pù=aÂëõ0Øòå²Ë.“÷ßßÎ?À–×DäááG¢¤)sذa–?[h ÿbÁ*–4ðÆ€9ꨣdó–ÍöÃ` /1è“O>9JÂKÙ–³MPžà:iÒ$ùì³ÏlZÚLø€6ƒÿá‡nÛ÷ØcYáNaÑ¢EÖ¡켤Gx@X±x*­Zµ11]¼N„÷W\!uëÖ¬-( •†ú0

    "Š£‹9F+Ö<<Ó_ôXGâž‚Ê'¤ñìñŽý¥K—Zo¨gÍê5ö;R„ù˜+ÍÙšcsN;Iǘ&-a0È©û“g‚À£àt!¬Žë®»ÎjS<þþûßÿZkŒ°7¡æ H‹¥Ë€@;ÓÉO?õ´t;¶[\Íe`}öÙVId¯Ë¶ubñ8a{ÆgXO‡:X:u²^Vz·nÝd̘16vLÇæ1h "g©yëf p}ÚôiöayôÑG­ɵSN9ÅZ²´Í×°–°p&Nœh7¦DÀ£°P´(/Š7=V+V GâêÌŸ Ìá‡zË–+kãÞ„”xÐxò›0VP´xux|^BÈ HÒ °ùðÃmÌÖÛ6<^B ´áŠõŽ1²lé2;vQÐ`ȸB˜EêêPòSòB„8P$„¯¼òJ»Ðžò“PŠðœWÆ m=òÈ#F"câ¬â%êâYþPJ”yóÍ7[#œƒú¬Q¾<òˆUŒ=ÊÀØGG‘ø60|#gÿøˆwì#wØAcbN™çÑ)®9… ´ƒ¾€¯C9ÄŽÒùi_Ëýw"ü¦“Atj"éšk®±!—#F¡@çáa²áíÀÄwû|¡ P&¸}nÀÆÊ€ßu×]xÂ=a&ý!æ$l„x°b a5×›‡ˆ[, …—[¥ðp˜X†|ÿ–.Yj= âë @êG3 9¼C1 «0ü. Ö.ƒ™P^^ÓS/y]~0eÒë‰%JóÍ­K^¼Ä^e’(¬|„ý _ŒÂI/áíxåÈä>cŒ1 o„œRqí£pÚFè•g„eˆ×G_áEsN™`ÌüÆK´þòS'9„÷ÈXtŠ„þÌoB ¥ý´÷Øcbö`åmåÏ‹‘»¿œ;r×Ië'> NÖ P"È LŒh}„²aìcˆºgÐk¹S§—ÇÇhå7lØ0p<»²ò2öáïs×î]GÊ#÷çÅÊyYà.| w϶?cJ¾¸•‰·²Dž;€Q&ƒƒ<þxxî2žùÓ6¡ˆáˆô ‡£H×yˆø CNPsŽq“ØÎJ¤x…@üyù±7"üK^ð ›GyËe=ñÄV0£ à …ÃuÈ¥åèŒ ”"ír÷P XÒî·ÍèÉoz—Ÿ#eºr ½áñMž^~ß´Á‘ÿ¾÷º÷×ù ž½{÷¶8º´”Ç|ÂY=Ë›A}ÄóëúÊ= ŒsGÞ:9çÏËo´1@9ÑÆ³+7–´^¾xŽáu×Î]–/”¸“WŽoÒpCØym´LŠÝgºrÝ1e”‰c(ÇŸ~úÉ®æ"v>jÔ(ÛñhYbÁ(„>ƒÂeuôÍ·ßØS47€cíB.$Ã9e8ò -,,ÈÅø9g¾Ëåá‡yBX”,‹„p½Ĉó“@(Q&eYä„<¸p<¡ ÔÞû(l”8¹PùÜ äòÿÞ{ußÿ}ÿï})ß¡y@YA¨‹‡Åsçε #ÂçJþUÂO„By9k°Ä v!ÆýÏÃê¬p—ž#« â¦åág\/VÜzb”G¹AýƒPpäÇ—0$áN„ Pýó._²(’Í›6ÿ©÷l1öÀBQãÄáæ£Jf•´XõÏ Ï†×ëÁâwßhc9,ˆe<Ç3öyV1&\¨ ~ ï3æ¼Þ(ãŒPŠÒÆ“öî·k«÷X('à±ò°´š•¬’âý„;î¸Ã¶IO&–±Ä™HgÒwÜiï39'Ä%fÐzâ”Q4"NN§ñp“‡ú™`åE0A¡!À6ðÅRE,i¯…Tšˆ?ê8ýôÓ­À£m64bÆL¦·oßÞ®Va²›ÁÅ*ê% BEƒwá,4Êã!ó:Ç+÷!ïýxÓ“üxà ü·ß~ÛZ±ô• cbqƒ§«;ÙGËŒùçêÁ3eþ„9;G-ðk.á0Þ‹C°ð¶²ðrè”mbÜ’î—_~±á„^PPÿxC˜Ž?ŽœxV>þøãgéM—Ìs‡ Çpõð2e…ãƒO þÂQ¸²\ŽÞûn1^_´>ÂdŒ£ ¬+3¿Ñʧ?ƒÆ³w|Ä;ö1°Y!‡ÂƒwŒj<Vñ1~Q<ãŒ'ƌÈëÉàï®ù…Ò3V[ð"+¶ bVpõíÛ×þæ&<b×üAXŒXjŽ˜(EÑ`‰0)Èd3öA„åÁzuêG C(·§žzÊZòL¼b9³(à…^ÝÇkÇü&„ …ÇëBVĬ/¾øbÏÇCé°ö!‰ÐÆ#¹à‚ ösáó‹w;óPXTXX=ô´”ï1QZ†x¿6á –•²„aÀþ·¿ýÍ „Cµ aG{ÜL¨†)G” æ@û‡pŠï£>²Ïu¥1Y BxÌx–°Ú½ÞÖòéBÒ(”EP¹À äYày¥›6k*+–¯°lDã7Ú Ü4žÃXÇ>ólxì($¥rñ%ËË/½òl)Ÿ±äBsm´‘ª×(±xþ1&êˆ! Z‹#…¹Ø¿§œ:Eà;—?ZAhUB±’[bÈ` ?ï´‰X4&á0?Ñf “l±òItá :޲vå£å±°.öíî.Δ.Þ{X§( Ú‹B[ùí<W¿Qؼ(ÆÄX€gøD!xq便m\£lÚèúž¶ƒ%a /Eêêd ;üÈãú€çʉŒ0uõxËNÖ9ø¸~ŒTü’¼vîÚ)%Š—°ž·mŒ=;½eÚü&¯l€%áÆ&ý­¸O:êbá J˜Hƒë ~á'ZùAãÙ?>‚ÒzÛÎØ|üñÇ­± †Ž(o‹öóŒÒ.×ÿxÍÈ1^øöŽ—× ½2q -LÇd)‡‘{¼ƒÍÝãÈC€‚D ;¥á½ŸŸçðâ„ õFã=?y‹¥.‚¥Ÿümóßô;•ú'rÁ—Ÿã.ZÁ«ÁðêYší”‰kc4~ƒÊÏþñ”Öñ‘ ƒ7Ú½Ï÷(lÝuŽ(^ÀÄ3ÆpŒD)3g‰A½žÿ Ø")¸a€q??èH(¸AïîGãÝ¥K•#8†S$ðço[¬<§RÿÄÊs<éò{ÜEë#ú!ì^Rô·%¿AågÿøJë剗jyg¯ØO”á-„°ÞIz>~§Œgâbxá˜ô^#„O˜Ë›·°œã™Y…¥ÚE  \M8Ý-÷OuÞ™ÛeN¥Cˆ–ðiÊ ø ë=E@P’ó ü¥ Å¢Hh sYüE£˜” ±¹dS4WÐÕOü·8c&n(+^™ˆ‘¶YPò´›3Á$Ô•©ÄªüP¯¶[Pò†@Lž‰·h&š’AÞIŸ ò‰Ý!PÙÒ€ódñÄCAÜÃ#¡Ý,±e",V¼ ‚W­SP2¸•I*@„eŽ2qï.¤OùÁíF‘Ä¿Ì~´E@P1+<€ ·]ùqD˜²¢IIPE 5ˆY™$›] Û$a-_Pä!vðɃBKVE@È+ªLòŠœæSE@! 
a®z¢(Š€"WâR&¼0è^šËk…‘ò黑Ñ늀" ¤>q)“d6‡)•E@PÒ”Q&[Í^úJŠ€" (é‰@ÌÊ„0ïš$ëóðÞLOP•kE@P2 ]Í•i=®íUE Äì™$¡îý‹4\RRE@HORF™øUÉÌ™3ÓQå:fZµjsZM¨(©@Ê(“p0qÄá.ëµB€Àĉ A+´ Š€"àHe¢a.×'™sÔ>Ïœ¾Ö–zRF™øÃ\…ym hŸë P )£LÌמ ªÚ’ØÐ> 'M¥¤)£LT•¤ÁhI0‹Úç T‹S ´~ÏdÙ²eIû`ßWÙ¹sgvMøª“ÉׯMí§Ã×,’²Ÿ Þ´i“ìù#9¯¾Rn²ö¤‹„u¬×ùP\ÐGërrr’†K¬<t:ú/£‚毰ԟ2ž‰ì‰]|ôÑG2dÈá­|«¾þóŸÿØoÂuÌèÑ£åÝwß•Ç\Þ~ûm¹ì²Ë"&3fŒ|øá‡òÌ3ÏDL3mÚ4¹ôÒK¥bÅŠ6 ÊçÐC•x@Ê–-1ß܈…¯¼–ËÍ·È£>j³7NÞ|óM»ãÁ)§œ"'t’L˜0Á~*ù¢‹.Êkûç‹£Ï÷ϸï×¼ dÊ”)Â&¤|ʹN:Òõ裥¸Ù­!ˆ-^,³gÏ–îݻ˜9s¤UË–“/1içüö›œÐ£GÄ4ú©¬_·Î*Û]»wK‰â{­¦‡4“NG1ßÞøâ‹/¤{·nÂó³rÕ*™ùË/ætmOÚµeé’%²ËŒË-ZhU É¿pÑ"yèᇥ|ùò+”tÓ¦MåÒK.‘R%K&¤!ÓÌøøöÛoåꫯöß ý^¶|¹<ñä“òðƒ†®qÂØúò믥ÿµ×îwÝýض}»|óÍ7Òã¸ãÜ¥Œ=¦Œ2ÙS$¶>àAyöÙgeðàÁR·n]ùý÷ßåú믗×^{MúöíXȱÇkÏf³Ø«¯¾¨L òܬ^½ºŒ5Ê^A˜!x?5‚åœsÎñ¤JýÓ#FH³fͤtéÒ²È<ðwÝu—ôë×OÚ´i#wÜq‡”*Uʶíì³Ï–SO=5¤@¤e±öy¤:þØó‡üôÓOÒ«W/)W¾œälÍ‘Ï?ÿ\~5KZµ~‡¥Ž;5jÖ”FÐN›>]Z´Š¬L\8.ˆßÞ§ô¶l®Ë^'#GŽ”ó/8?ÄvìfR(KL' ,”*UªHÑâÅdkÎ6ùâË/¥K—.Ö@ñŸ|òÉÒ¨qcùä“O¤Q“&’U2+¦r“™,áùž{ï±Õlß¶]î¹ç™8i’t9ºKRªú.¨ÿ"õqËÖ­¥‰Qv‘òælÛ&cÆŽ•î=T™¤Œ21¾xL OdãÆÖ%C5äÎ;ïÜýþýûË%ÆÂ9ì°ÃäÖ[o•ÚÆ2ãf’¬ÇsŒü Œ5»aÃ0`€<ñÄò¤±H>þøc)W®œ\pÁÒ§OË õ\yå•ÖrmmV{VVä²L™2RµjUë!ñ1eÔ¯__n¸á«Q4P·nÝlýXLrøùÅX•XÔSÒXiñòõÞ{ïYPØ_þò¹ÖXS¸÷à3ÝÌFYѶm[˃÷ß°aÃäþûï·—Ð]»vµ^°Þ¿4‚ª‡±Ìñßÿý„(âXûÜ˧÷|»y·™?k‡˜ñSÚ(¼Î:ÉncpŒ÷¹ðR$ããkc9–3žbûöíe‰±ÔW¤žQ& .”õwBAã?o-üÉÆýÍx!%J”Ö&?V3‹C¶ t옱²fÍ[f7c˜à ý‰Ü¢ßxža^Â¥\rÔ õ`É;eêT™;w®”7c¯R•ÊR»Vm©_¯ž|ûÝwò»ñ4*U®,mBg\ùiÆŒÒÕ(p\°pT6iñF šFQ®Z¹Rš%R«v-™kênà}ùËNÚoðá/Ÿ’æyªP¡‚õ$çÎùM~4ÏÍÖ-[¤ºé·ÓŒÑ‚ þá‡,;í̸ÅX›mŒ…‰fŒ’O§Á³Ÿ‰27}ÆóõÝ÷ß[ãç8ã-téÜÙæÅÐø¿²téRûô5Ò‡(B¿ÍŸ7O~ž|¸~øáò·+þfÃqÔóòË/Ë;ï¼#„ÜæÏŸo"¡+"a¥Å&¤B=ñòµÎ„Xžþyyúé§m»Æ/ÔO= vÚwá…Ê /¼ào‚ 7 dy(¡3Ï<Ó† 9§}à‡"Àñ;#è"ýqþùçï÷Gh1yû;/ç%ò8äCäŽkÂrÓM•Èʲ‚·œ £ºØc„Ärs$¬EKL?Ò÷Ûñ±Éà‚ÒÄC¹¯[¿Þ ‹ÓO?]N0Âbª c²"ß*#ØQNÜc~f…Ô‘x¦­Þ{X¯x{„ {9Ïôû3©ë½{÷–Æš3{ŽUŽHŒ£ÓÏ8Ãðá-sÚµÞä/oÚÂï ë7Øv¹tè5Ùkí½J+Y,ܽ‚>n6Jà%óð÷ð#عÉö&Á¸ýöÛmH‹ˆý ñ¬üñò¯ýKV¯^-³rõã@:ÿµ­F…‚árÛm·Ù±2ÊFÑ:™€žÍ†Æø©`Œ†$kŒÀç9oož#ÛGæ™`Â߯G3ó,P&ccµñN½8üa PKþ¾ä·¡º&?slF¤½Ü„€CùI._†]KeB—yÿèŸpDXÁ c"K˜‰µ¡C‡ÚÁÉŠuƒG€€þépûÐáÑ„#p¬8G•*U m³oîÜLö>òJŒ¿3Œ%I=÷ocIDATX˜9ä-w–qͱøñp˜‡p^é˜p„"£žxùÂú&4ƒõÄ °®Q&o¼ñ†Ð®GyTþnN?ºñZL µÑ&÷%̬Yûñë/ «Ÿ›#æŽà+yû;/çX¢sgYÖ(ÂæÍ›[ï‰'Æa¼ªåF(æ!4ÅÂ,i G®G9âqÐŸŽ—,„uîD>Ɔ»nÇ‚Iï~û”í½–m¼ÆÍ¢ʯf”+}áY5å»´(Î+šû'%³JÚ¹„qÆ£uiÜ‹ÙËž–»7DˆŒßE̘¢ܽ‚> !7þŽ:ê(;Çœ „!âø[j¢÷›…5kAаaCÛ‡î^‰¬}í¡}\g—öä/cúÌá[Ù„]^\èÜ]ãy»s®×4ã‡üá ?c Cï}wžÉÇ”Q&Òìô`ÂÂ'¬ãˆ‰o„0ƒ‚°õeãJwìØÑ%  …AÄUQ>ü¦>Â8/Ä`fnPŸðH˜e±|!üƒ(^¾˜„†ÿ‹/¾Ø†æX˜ÀüÒ+¯¼bC×\sUÌËBñü’Þ-Ÿ$Gˆ¼i/¡¬ÃµÏ›†¸6 é:çÆ¬½÷Cç °Ü&K;aÉܲX¹„-jþ°ö±&kÕ¨i½ ,\,L—Ö­‘`òãÍÑ/Xž; F„)ϦƒéXùõ¥e5ó XÍ5’C©PV£4»R^ޞЬ_µ^s%=WH˜eñ<¼õW49Þµ¨MYK+M~WÆHUãA‘g“ñJìŠÃXùOf:×ù¹ul2 ¦ OÖ6Âڶσý…Â9Ù'MÍ—Xy¾B0çâ㳉FüÊj6ƒc‚žÌ«ÙtÞôž:üe„ýmÒaB½ÌŸœdž³ëqÅsÀ!½ëƒ°y}<æ4)3F…NÄ­™ÇJ@€bm2p Üf‚ k•Hݺu³÷Ü?&eÉ{ã7Êÿû_aòKrÓò‰•xЙØw„ð½ûî»í¤½»Æ‘x9s#mæx°ü™û G”_„ǨeHùLÀ^wÝuVIsF0N >ÿ";qk„,á.pC¹âA¡Ð!«nxˆì¤´»æˆR'¼G8(ˆbíóHe`-ÂËX£üè;ÚQÁ¡ìÚ¦MkŒU[ÁxKx›¿úJXÅÅ=÷WÊx)ðû¥¹G>¼,OÊa1“´ŽOïÑå÷óæMãî1áŽ1Á¼Ka,á¥7Åò‘éöæw¼ÄCð³@Æ T˜gs–ãïš•›ÿ8ÐâÆØÄKÃ(#KçÎ[Åó¹™+¼×„tÁ“hýëî»ôÔë®9øÍ\ÎÆûp„@Ö@”Í ÒL™Wû26[ÌݽlžeæO2™Š«Ì‹q`µLœØK„/ˆ©“Ô„¢P ñÍÆ²sŠƒø:$B)Y„°³1{c¥BxÑn¼|1G”½6[š°€—°Tb„RÂÑ‹/¾hç{XD„{ì1ëy¥‹vÕn›9¥D!-ÆŠ!^²´ï†äæÅ@aþðI¢ˆ:ìx3 " µÛ%êE) üX,Ñ¡Cëe†y„a¤1ù‹ñ`𜼫óàEè 2Y|LîBÊM'Bù²P/W‘]˜‹iPº¼Ü[m¼#d•wœÑ·,ÀÈdJžäŒÕ¼h4&¸óJ„Bœ"¡ ,ùdBPŠ£hŠ„tñòÅd?~Âú "ÂcxMXµ~ÏÅå#„GˆŽV"(/}©^,D(Oeá[Üü¹¼ÄÝó\–ÍæŸ)¿¬p®„ý#(?7K_±¸12ðBÜ<%ryìÏ?Þ Âãii¼R7çåçkŸù"g¡{²§Å)sy/†‘ÀœäòDJ——ënîu¿²QÞ¹P^Ê,,yRÆ3!ì%ÞlŽæ™xÓëù#@ü¾|…òa•¥³¼’…‘¼›x8À3ibB1™NL¤£HPàýhoîûñÂÛ!Ü ¦pDˆ -ÞrÕ¥× RÆ3 bRïå(Š Â‚VJ,eŒEÛø”j´m{X:«¤ä)£LrWyçG›µŽA@ûmOæ"2ÊÄ¿-zæv‰¶¼0# ã¼0÷nf·M'àsûÛÎݲc×î ©ÀC •+Š@Ú" ÊÄtÝ>\ g>8EÞþÎ|ʳ€è@xÈ1Šð¹±ËdÊ"óÑ%%E@P ŒW&»vï‘g­“ÛU’ží‚?攬þ9P¶ïüC>ú~¥üºlK²XÔrE@D eæL¹ôÜœ´`£¼0z©tjQY¾ž™-eJ“Kº×‘W¾X!«7l—>G×–Ó«.~¼PVdo—Ç.ij>áZTî|g¾ùfE¹ý¬ý·=çúŽ]Èü•[åû¹›ä¤v%d 
)âo¤D±"Ò¾I¹¾×A2mñ&8r‰t¦Þ_²%«D19¿k-éb”ôñO«å£V‰ÑMÒºayéß«ž<>bIžx8¥CI2a¹|>m­1ewm]E.ï^›/º†åí!ÓVh¤á¡nÕR’eø~qìRù¿SJ‹:emY?ÍÛ(û5—Ÿ ~ÏŽZ"½:T·é¸°™åÝ_˜))Š€"+i'16çì’%«sd²ŽšT”¹Ë·È}oÏ•VõËá_T†Œ^bç>Õ(#s–n–_—æÈº-;eÒìuÒ F©?áÒóÐjöZS#t[×/+ßÌÙ(£ŒP>¡}59¾}U5iµ|;gƒlÙ¾ÇÖ;vÊéÖºª¬ß¸C†_nófoÚ)Ï\l¾QTŽ1‚ÂÔ5F@¯•¼òð«iÓ»_¯ÎÍ+K÷¶æS¬ß®”wX‘·ãLPsƒA“êæ£KÛv˲5Û$gÇÞ9 5›vȲÕ[mšÍÛöØ{CÇ.‘ꕲdÑšœ°uÙÄúOPH;Ïĵë‚cjË‘Æk˜0}4«WNþÖ£®ùÌjyÿ›²ÙþãÛT–aã–E°^êW+e¿ºÖ£Ueylø"Y¸*Çsá±uäÈÆ{¿÷@šFÕKKm#`ï:¯©Ì1}ê‚Í6ÝãµR{ï×÷ú_Wºµ¬"h”ÎÎ]{ä‹Ùëmù\ØT*—-!‡™2³Š•š³òÄÃ3Æ3Â+*™µW×—*QT~0í¸ÿ¼&ay;ýð½_>lT£´Ô­RJf-ß«8VáŽýzd=¸HuõéX#\6½¦(Š@XÒV™/¶·=|n2º$DUŒPoÑ ¼Lš»AV/âàºe­ ­P¦¸T,··Ù%Kx2äæ7c 4 §s«*Ò±y%™±pc¨LNʕޛ7·J{oµ)*•+üëV.e¾Ã-’W–fo³ùwïÙû¾t¯#jHµrY7Ë„ùGÝÛîŒ0žŸêUÙû-øHuùÓëoE@P‚È;AIÒïžSÝMøÇ†Ä~[/Çoº¼{]¹·ÏÁö¯ÝAþ ÝÌ%›%sCïƒd§Y% uü±½îÿ‡…ùøãK8>R,É>|¸¼ÿþû2mÚ4+Ô‚ê3gŽ|øá‡òüóÏË=÷Üc…ßܹs÷«ö†n°ó$ÿú׿„ƒ¡T´Ÿ~ú©T¬XQn¹å›>''G¾ûî;+L/½ìÒýÊj_P›¨ ºï¾ûìüQ¤¶{+‹µ?‚ê¥ ”&ŸiFÁV¯^ÝâN=AùPxàtÉ%—|èС2sæLË^P?ÄÊs´2"ñìűT©}¯>xqKÌL˜·ÄžoØðç7·X¼¥ä;'Ÿ|²ìÚµK7n,S¦L ÕÏDùÝwßm…îá‡nçð0 |PæÏŸ/Ó§O—5jÈu×]g¯ß|óÍr 'X‹û›o¾‘&MšXËž›Ë–-<?î!žŸÞUW]%>ú¨ìÞ½[˜ç(W®œ¼òÊ+ë]ºt©0±¾yóféÚµ« 6L*UÚ»“¶«»AƒRļÿÔ¨Q#yõÕW¥yóæ‚¡hhBBÙ 8PªUÛ»«½hþµ/¨M 6´E€7ÞQ¤¶»zÜ1–þhÖ¬Y`y,6 OK”(!}ûõ•»î¼ËÄ/íìÔ©“œuÖY6-ž˜B}ôQÄ~à~,<!ʈijÇ¢E÷÷A+(È”V&uëÖ¥}JŠ@¡@Á€2ÁÊ®\¹ò~³J•*¡UNŸÆM‡ÚÌÃË2Ë’%Kä˜cŽ ÝãA^³fµèÛµkºN)œ2)S¦ŒÌ›7Ozè¡°|\xá…ÖëA± ä¡ z áŒ;V.¸à+è_]{íµ!>ü'(¹V­Z….תUË A¼2ˆzýŠ„ëx,‘Ú­Mä‡bMGÚXú#Zyx]ô%T¡|«Ì8ʇ‡zÐAûÞ‹;ì°ÃBÊ$¨(7ž£•‰gÊDx}„USZ™Db^¯+éŠBaúöÛoÛp’k‡ßÚs×½G¼„C=ÔZîî:}ÕªU¥víÚ²`ÁwÙ*€ÐÏ a,Âk/¼ðBX>=±)«¥~üñG9òÈ#­w©^Â2wÞy§ ]?Þ†rš6mjçP<Õ†Nñ¼|²º+++ËzjS§NµL(±ç$¨}ÑÚ䊉5écéhåE*#Z¾Õ«W;–mèÓýêpŒTŸËÏ1exËãœþçoÅŸJ+Š@B@‘àM0ÉïNFGu<ʆºÖ¯_o­ùqãÆIŸ>}¬Á‹˜Ö­_g=–›nºIðP„†‚êE)ÞqÇR¾|y;¿Ò¦Mñ C?ðùÓO?Ù¹îŠ*š j_P›¯A´#(M翼–”,&Mšd%:bÞQP?¸4ÑŽy-Ëc¤:Ô3‰„Œ^W’€±pbæ=zô°“Ü#FŒ°žE,UuîÔYN=íT» KI×{ï½×fe¢uëÖrÊ)§Ø0QûöíÃÉ}„ãå„¢éÝ»· «¡X„r‰T/ó:Ì© ìñ 3Ï<3lÝ\d¾¶mÛÚps<(žÇ{,bzw#¨}Am"ìÖ¡C»2й‡Hm']¼T¯Ã"\™AùXÅÅ”9ÆFË–-CáÏ þWO¸ky-ƒÅÇO>ùÄz8þòSz£Gâ¾JŠ@aA`âĉvN€˜8±tBT™ôŒ‡˜#aþåC±Š7/á)VÛÔ¬¹w×jï=ï9ó&ñò©^¬~ sÑêu<À?íG¡¸yw/èÔ¾ 6áuU®´wçð tAuGº—×òÂåþX¬€70räH»êíÙgŸ U©B b8Ék^ýÕ¨2ñ#¢¿$!€2Ñ]ƒ“n!)–¹óÎ;Oúöík'ÔQ"ýúõ³ï¥zuÎ$Báb&b –_ûTRE „/ ûá¹a|ðbª["-oAß/4Ê„5ØÄ¡Eï¼óŽPŒµ¼Hõó1/J%ŠX…Ö¬ÅWR‡s¼ÈÉ;*‰”iÉFªÐ(“d• åó¶1oDzmÛo()Š€"*¤2a©1D^–âKŒwÝuןÂH¼DÓºMka™"ÛQ8BÛ3±Åª¯eÏË]¬áVÞúuÄD;¯2QÉ &!ÒP yä—³»L[¯(Š@ê! Ê$õúD9RE íPe’v]¦ +Š€"z¨2I½>QŽE@H;RzžuùJŠ€" (©@Ê*ïNÀ~_{í5»™c:u„½¯øo³Y"/3®^½Ú~÷cðàÁv3ÇpéyKý¡‡’§Ÿ~ÚÏöðl¥Â^XŽxÙ‘íæù ”- !C†ØwYØÅ÷Í7ß´o¹³E oÎwíÚUÖ®]kßʇöô‚"¥µ7=ÿàgÆŒÂ6úlX9zôh»zmÓ¦Mv;·q¤'Khv涘aëÞ̇¶‘©U«–}ÁrêÔ©òüóÏ[þƒîÅZ¾K «¡C‡†mËÊ•+åöÛoaÌv8¼XÚ¿[4ßoé}JoéÜ©sD ÁkРAv7Þ1¢/øôéÑGmËp;$„ÃŒ‡n¿¥Î§(‹±P¬X±ˆõ¹6ëQPö!²oÀïc1ü›;òÀ—+W.|ßÕxÓû²Gü¹ný:Y—½Î¾ íÅÂXÓ¢8Ü‘¼åÏ_l:I{#ŸùäåLÒ Lá¥dÉ’69›O–*]J*WúóK™A÷¼u•ïMtk[‚ʇ!ŸX¸p¡\wÝuv_5ppJÜ•Žv#@ù¸—UGŽ)(·Ë/¿Üe“põ…nê‰" „H[ejžd<O<ñ„ÝÆŸ}Óâ¡1cÆX¯=Üð&ù ^ ^œ’" ć@ʆ¹âk†¦Îdš·h.íÚ¶‹B’ACüû¶‡õÞâ.X3(ˆ€z&ØéÚdE@P€. N4¢Zž" (ˆ€*“ ìtm²" (‰F@•I¢ÕòE@È@T™d`§k“E@H4ªL¨–§(Š@" Ê$;]›¬(Š@¢Pe’hDµ= 1.7 you can ensure that the link is correctly set by using the ``--set-upstream`` option:: git push --set-upstream origin my-new-feature From now on git will know that ``my-new-feature`` is related to the ``my-new-feature`` branch in the github repo. .. _edit-flow: The editing workflow ==================== Overview -------- :: # hack hack git add my_new_file git commit -am 'NF - some message' git push In more detail -------------- #. Make some changes #. See which files have changed with ``git status`` (see `git status`_). You'll see a listing like this one:: # On branch ny-new-feature # Changed but not updated: # (use "git add ..." to update what will be committed) # (use "git checkout -- ..." to discard changes in working directory) # # modified: README # # Untracked files: # (use "git add ..." 
to include in what will be committed) # # INSTALL no changes added to commit (use "git add" and/or "git commit -a") #. Check what the actual changes are with ``git diff`` (`git diff`_). #. Add any new files to version control with ``git add new_file_name`` (see `git add`_). #. To commit all modified files into the local copy of your repo, do ``git commit -am 'A commit message'``. Note the ``-am`` options to ``commit``. The ``m`` flag just signals that you're going to type a message on the command line. The ``a`` flag |emdash| you can just take on faith |emdash| or see `why the -a flag?`_ |emdash| and the helpful use-case description in the `tangled working copy problem`_. The `git commit`_ manual page might also be useful. #. To push the changes up to your forked repo on github, do a ``git push`` (see `git push`_). Ask for your changes to be reviewed or merged ============================================= When you are ready to ask for someone to review your code and consider a merge: #. Go to the URL of your forked repo, say ``http://github.com/your-user-name/networkx``. #. Use the 'Switch Branches' dropdown menu near the top left of the page to select the branch with your changes: .. image:: branch_dropdown.png #. Click on the 'Pull request' button: .. image:: pull_button.png Enter a title for the set of changes, and some explanation of what you've done. Say if there is anything you'd like particular attention for, such as a complicated change or some code you are not happy with. If you don't think your request is ready to be merged, just say so in your pull request message. This is still a good way of getting some preliminary code review. Some other things you might want to do ====================================== Delete a branch on github ------------------------- :: git checkout master # delete branch locally git branch -D my-unwanted-branch # delete branch on github git push origin :my-unwanted-branch Note the colon ``:`` before ``my-unwanted-branch``. See also: http://github.com/guides/remove-a-remote-branch Several people sharing a single repository ------------------------------------------ If you want to work on some stuff with other people, where you are all committing into the same repository, or even the same branch, then just share it via github. First fork networkx into your account, as from :ref:`forking`. Then go to your forked repository github page, say ``http://github.com/your-user-name/networkx``. Click on the 'Admin' button, and add anyone else to the repo as a collaborator: .. image:: pull_button.png Now all those people can do:: git clone git@github.com:your-user-name/networkx.git Remember that links starting with ``git@`` use the ssh protocol and are read-write; links starting with ``git://`` are read-only. Your collaborators can then commit directly into that repo with the usual:: git commit -am 'ENH - much better code' git push origin master # pushes directly into your repo Explore your repository ----------------------- To see a graphical representation of the repository branches and commits:: gitk --all To see a linear list of commits for this branch:: git log You can also look at the `network graph visualizer`_ for your github repo. Finally, the :ref:`fancy-log` ``lg`` alias will give you a reasonable text-based graph of the repository. .. _rebase-on-trunk: Rebasing on trunk ----------------- Let's say you thought of some work you'd like to do. You :ref:`update-mirror-trunk` and :ref:`make-feature-branch` called ``cool-feature``.
At this stage trunk is at some commit, let's call it E. Now you make some new commits on your ``cool-feature`` branch, let's call them A, B, C. Maybe your changes take a while, or you come back to them after a while. In the meantime, trunk has progressed from commit E to commit (say) G:: A---B---C cool-feature / D---E---F---G trunk At this stage you consider merging trunk into your feature branch, and you remember that this page sternly advises you not to do that, because the history will get messy. Most of the time you can just ask for a review, and not worry that trunk has got a little ahead. But sometimes, the changes in trunk might affect your changes, and you need to harmonize them. In this situation you may prefer to do a rebase. A rebase takes your changes (A, B, C) and replays them as if they had been made to the current state of ``trunk``. In other words, in this case, it takes the changes represented by A, B, C and replays them on top of G. After the rebase, your history will look like this:: A'--B'--C' cool-feature / D---E---F---G trunk See `rebase without tears`_ for more detail. To do a rebase on trunk:: # Update the mirror of trunk git fetch upstream # go to the feature branch git checkout cool-feature # make a backup in case you mess up git branch tmp cool-feature # rebase cool-feature onto trunk git rebase --onto upstream/master upstream/master cool-feature In this situation, where you are already on branch ``cool-feature``, the last command can be written more succinctly as:: git rebase upstream/master When all looks good you can delete your backup branch:: git branch -D tmp If it doesn't look good you may need to have a look at :ref:`recovering-from-mess-up`. If you have made changes to files that have also changed in trunk, this may generate merge conflicts that you need to resolve - see the `git rebase`_ man page for some instructions at the end of the "Description" section. There is some related help on merging in the git user manual - see `resolving a merge`_. .. _recovering-from-mess-up: Recovering from mess-ups ------------------------ Sometimes, you mess up merges or rebases. Luckily, in git it is relatively straightforward to recover from such mistakes. If you mess up during a rebase:: git rebase --abort If you notice you messed up after the rebase:: # reset branch back to the saved point git reset --hard tmp If you forgot to make a backup branch:: # look at the reflog of the branch git reflog show cool-feature 8630830 cool-feature@{0}: commit: BUG: io: close file handles immediately 278dd2a cool-feature@{1}: rebase finished: refs/heads/my-feature-branch onto 11ee694744f2552d 26aa21a cool-feature@{2}: commit: BUG: lib: make seek_gzip_factory not leak gzip obj ... # reset the branch to where it was before the botched rebase git reset --hard cool-feature@{2} .. _rewriting-commit-history: Rewriting commit history ------------------------ .. note:: Do this only for your own feature branches. There's an embarrassing typo in a commit you made? Or perhaps you made several false starts that you would like posterity not to see. This can be done via *interactive rebasing*. Suppose that the commit history looks like this:: git log --oneline eadc391 Fix some remaining bugs a815645 Modify it so that it works 2dec1ac Fix a few bugs + disable 13d7934 First implementation 6ad92e5 * masked is now an instance of a new object, MaskedConstant 29001ed Add pre-nep for a copule of structured_array_extensions. ... and ``6ad92e5`` is the last commit in the ``cool-feature`` branch.
Suppose we want to make the following changes: * Rewrite the commit message for ``13d7934`` to something more sensible. * Combine the commits ``2dec1ac``, ``a815645``, ``eadc391`` into a single one. We do as follows:: # make a backup of the current state git branch tmp HEAD # interactive rebase git rebase -i 6ad92e5 This will open an editor with the following text in it:: pick 13d7934 First implementation pick 2dec1ac Fix a few bugs + disable pick a815645 Modify it so that it works pick eadc391 Fix some remaining bugs # Rebase 6ad92e5..eadc391 onto 6ad92e5 # # Commands: # p, pick = use commit # r, reword = use commit, but edit the commit message # e, edit = use commit, but stop for amending # s, squash = use commit, but meld into previous commit # f, fixup = like "squash", but discard this commit's log message # # If you remove a line here THAT COMMIT WILL BE LOST. # However, if you remove everything, the rebase will be aborted. # To achieve what we want, we will make the following changes to it:: r 13d7934 First implementation pick 2dec1ac Fix a few bugs + disable f a815645 Modify it so that it works f eadc391 Fix some remaining bugs This means that (i) we want to edit the commit message for ``13d7934``, and (ii) collapse the last three commits into one. Now we save and quit the editor. Git will then immediately bring up an editor for editing the commit message. After revising it, we get the output:: [detached HEAD 721fc64] FOO: First implementation 2 files changed, 199 insertions(+), 66 deletions(-) [detached HEAD 0f22701] Fix a few bugs + disable 1 files changed, 79 insertions(+), 61 deletions(-) Successfully rebased and updated refs/heads/my-feature-branch. and the history looks now like this:: 0f22701 Fix a few bugs + disable 721fc64 ENH: Sophisticated feature 6ad92e5 * masked is now an instance of a new object, MaskedConstant If it went wrong, recovery is again possible as explained :ref:`above `. .. include:: links.inc networkx-1.8.1/doc/source/developer/gitwash/configure_git.rst0000664000175000017500000001130012177456333024346 0ustar aricaric00000000000000.. _configure-git: =============== Configure git =============== .. _git-config-basic: Overview ======== Your personal git configurations are saved in the ``.gitconfig`` file in your home directory. Here is an example ``.gitconfig`` file:: [user] name = Your Name email = you@yourdomain.example.com [alias] ci = commit -a co = checkout st = status stat = status br = branch wdiff = diff --color-words [core] editor = vim [merge] summary = true You can edit this file directly or you can use the ``git config --global`` command:: git config --global user.name "Your Name" git config --global user.email you@yourdomain.example.com git config --global alias.ci "commit -a" git config --global alias.co checkout git config --global alias.st "status -a" git config --global alias.stat "status -a" git config --global alias.br branch git config --global alias.wdiff "diff --color-words" git config --global core.editor vim git config --global merge.summary true To set up on another computer, you can copy your ``~/.gitconfig`` file, or run the commands above. In detail ========= user.name and user.email ------------------------ It is good practice to tell git_ who you are, for labeling any changes you make to the code. 
The simplest way to do this is from the command line:: git config --global user.name "Your Name" git config --global user.email you@yourdomain.example.com This will write the settings into your git configuration file, which should now contain a user section with your name and email:: [user] name = Your Name email = you@yourdomain.example.com Of course you'll need to replace ``Your Name`` and ``you@yourdomain.example.com`` with your actual name and email address. Aliases ------- You might well benefit from some aliases to common commands. For example, you might well want to be able to shorten ``git checkout`` to ``git co``. Or you may want to alias ``git diff --color-words`` (which gives a nicely formatted output of the diff) to ``git wdiff`` The following ``git config --global`` commands:: git config --global alias.ci "commit -a" git config --global alias.co checkout git config --global alias.st "status -a" git config --global alias.stat "status -a" git config --global alias.br branch git config --global alias.wdiff "diff --color-words" will create an ``alias`` section in your ``.gitconfig`` file with contents like this:: [alias] ci = commit -a co = checkout st = status -a stat = status -a br = branch wdiff = diff --color-words Editor ------ You may also want to make sure that your editor of choice is used :: git config --global core.editor vim Merging ------- To enforce summaries when doing merges (``~/.gitconfig`` file again):: [merge] log = true Or from the command line:: git config --global merge.log true .. _fancy-log: Fancy log output ---------------- This is a very nice alias to get a fancy log output; it should go in the ``alias`` section of your ``.gitconfig`` file:: lg = log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)[%an]%Creset' --abbrev-commit --date=relative You use the alias with:: git lg and it gives graph / text output something like this (but with color!):: * 6d8e1ee - (HEAD, origin/my-fancy-feature, my-fancy-feature) NF - a fancy file (45 minutes ago) [Matthew Brett] * d304a73 - (origin/placeholder, placeholder) Merge pull request #48 from hhuuggoo/master (2 weeks ago) [Jonathan Terhorst] |\ | * 4aff2a8 - fixed bug 35, and added a test in test_bugfixes (2 weeks ago) [Hugo] |/ * a7ff2e5 - Added notes on discussion/proposal made during Data Array Summit. (2 weeks ago) [Corran Webster] * 68f6752 - Initial implimentation of AxisIndexer - uses 'index_by' which needs to be changed to a call on an Axes object - this is all very sketchy right now. (2 weeks ago) [Corr * 376adbd - Merge pull request #46 from terhorst/master (2 weeks ago) [Jonathan Terhorst] |\ | * b605216 - updated joshu example to current api (3 weeks ago) [Jonathan Terhorst] | * 2e991e8 - add testing for outer ufunc (3 weeks ago) [Jonathan Terhorst] | * 7beda5a - prevent axis from throwing an exception if testing equality with non-axis object (3 weeks ago) [Jonathan Terhorst] | * 65af65e - convert unit testing code to assertions (3 weeks ago) [Jonathan Terhorst] | * 956fbab - Merge remote-tracking branch 'upstream/master' (3 weeks ago) [Jonathan Terhorst] | |\ | |/ Thanks to Yury V. Zaytsev for posting it. .. include:: links.inc networkx-1.8.1/doc/source/developer/index.rst0000664000175000017500000000014012177456333021163 0ustar aricaric00000000000000.. _developer: Developer Guide *************** .. 
toctree:: :maxdepth: 2 gitwash/index networkx-1.8.1/doc/source/test.rst0000664000175000017500000000304212177456333017052 0ustar aricaric00000000000000******* Testing ******* Requirements for testing ======================== NetworkX uses the Python nose testing package. If you don't already have that package installed, follow the directions here http://somethingaboutorange.com/mrl/projects/nose Testing a source distribution ============================= You can test the complete package from the unpacked source directory with:: python setup_egg.py nosetests Testing an installed package ============================ If you have a file-based (not a Python egg) installation you can test the installed package with >>> import networkx >>> networkx.test() or:: python -c "import networkx; networkx.test()" Testing for developers ====================== You can test any or all of NetworkX by using the "nosetests" test runner. First make sure the NetworkX version you want to test is in your PYTHONPATH (either installed or pointing to your unpacked source directory). Then you can run individual test files with:: nosetests path/to/file or all tests found in dir and an directories contained in dir:: nosetests path/to/dir By default nosetests doesn't test docutils style tests in Python modules but you can turn that on with:: nosetests --with-doctest For doctests in stand-alone files NetworkX uses the extension txt so you can add:: nosetests --with-doctest --doctest-extension=txt to also execute those tests. These options are on by default if you run nosetests from the root of the NetworkX distribution since they are specified in the setup.cfg file found there. networkx-1.8.1/doc/source/bibliography.rst0000664000175000017500000000475112177456333020556 0ustar aricaric00000000000000.. -*- coding: utf-8 -*- Bibliography ============ .. [BA02] R. Albert and A.-L. Barabási, "Statistical mechanics of complex networks", Reviews of Modern Physics, 74, pp. 47-97, 2002. :arxiv:`cond-mat/0106096` .. [Bollobas01] B. Bollobás, "Random Graphs", Second Edition, Cambridge University Press, 2001. .. [BE05] U. Brandes and T. Erlebach, "Network Analysis: Methodological Foundations", Lecture Notes in Computer Science, Volume 3418, Springer-Verlag, 2005. .. [CL1996] G. Chartrand and L. Lesniak, "Graphs and Digraphs", Chapman and Hall/CRC, 1996. .. [choudum1986] S.A. Choudum. "A simple proof of the ErdÅ‘s-Gallai theorem on graph sequences." Bulletin of the Australian Mathematical Society, 33, pp 67-70, 1986. :doi:`10.1017/S0004972700002872` .. [Diestel97] R. Diestel, "Graph Theory", Springer-Verlag, 1997. :url:`http://diestel-graph-theory.com/index.html` .. [DM03] S.N. Dorogovtsev and J.F.F. Mendes, "Evolution of Networks", Oxford University Press, 2003. .. [EppsteinPads] David Eppstein. PADS, A library of Python Algorithms and Data Structures. :url:`http://www.ics.uci.edu/~eppstein/PADS` .. [EG1960] ErdÅ‘s and Gallai, Mat. Lapok 11 264, 1960. .. [hakimi1962] Hakimi, S. "On the Realizability of a Set of Integers as Degrees of the Vertices of a Graph." SIAM J. Appl. Math. 10, 496-506, 1962. .. [havel1955] Havel, V. "A Remark on the Existence of Finite Graphs" Casopis Pest. Mat. 80, 477-480, 1955. .. [Langtangen04] H.P. Langtangen, "Python Scripting for Computational Science.", Springer Verlag Series in Computational Science and Engineering, 2004. .. [Martelli03] A. Martelli, "Python in a Nutshell", O'Reilly Media Inc, 2003. (A useful guide to the language: :pdf:`http://www.oreilly.com/catalog/pythonian/chapter/ch04.pdf`) .. 
[Newman03] M.E.J. Newman, "The Structure and Function of Complex Networks", SIAM Review, 45, pp. 167-256, 2003. :url:`http://epubs.siam.org/doi/abs/10.1137/S003614450342480` .. [Sedgewick02] R. Sedgewick, "Algorithms in C: Parts 1-4: Fundamentals, Data Structure, Sorting, Searching", Addison Wesley Professional, 3rd ed., 2002. .. [Sedgewick01] R. Sedgewick, "Algorithms in C, Part 5: Graph Algorithms", Addison Wesley Professional, 3rd ed., 2001. .. [West01] D. B. West, "Introduction to Graph Theory", Prentice Hall, 2nd ed., 2001. .. [vanRossum98] Guido van Rossum. Python Patterns - Implementing Graphs, 1998. :url:`http://www.python.org/doc/essays/graphs` networkx-1.8.1/doc/source/install.rst0000664000175000017500000000741112177456333017545 0ustar aricaric00000000000000********** Installing ********** Quick install ============= Get NetworkX from the Python Package Index at http://pypi.python.org/pypi/networkx or install it with:: pip install networkx and an attempt will be made to find and install an appropriate version that matches your operating system and Python version. You can install the development version (at github.com) with:: pip install git://github.com/networkx/networkx.git#egg=networkx More download file options are at http://networkx.github.io/download.html Installing from source ====================== You can install from source by downloading a source archive file (tar.gz or zip) or by checking out the source files from the Mercurial source code repository. NetworkX is a pure Python package; you don't need a compiler to build or install it. Source archive file ------------------- 1. Download the source (tar.gz or zip file) from https://pypi.python.org/pypi/networkx/ or get the latest development version from https://github.com/networkx/networkx/ 2. Unpack and change directory to the source directory (it should have the files README.txt and setup.py). 3. Run "python setup.py install" to build and install 4. (optional) Run "python setup_egg.py nosetests" to execute the tests Github ------ 1. Clone the networkx repostitory git clone https://github.com/networkx/networkx.git (see https://github.com/networkx/networkx/ for other options) 2. Change directory to "networkx" 3. Run "python setup.py install" to build and install 4. (optional) Run "python setup_egg.py nosetests" to execute the tests If you don't have permission to install software on your system, you can install into another directory using the --user, --prefix, or --home flags to setup.py. For example :: python setup.py install --prefix=/home/username/python or python setup.py install --home=~ or python setup.py install --user If you didn't install in the standard Python site-packages directory you will need to set your PYTHONPATH variable to the alternate location. Seehttp://docs.python.org/2/install/index.html#search-path for further details. Requirements ============ Python ------ To use NetworkX you need Python version 2.6 or later. Most of NetworkX works with Python version 3.1.2 or later. http://www.python.org/ The easiest way to get Python and most optional packages is to install the Enthought Python distribution "Canopy" https://www.enthought.com/products/canopy/ There are several other distributions that contain the key packages you need for scientific computing. See the following link for a list: http://scipy.org/install.html Optional packages ================= The following are optional packages that NetworkX can use to provide additional functions. 
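If you are not sure which of these optional packages are already available in your environment, a quick check from Python can help. The snippet below is only an illustrative sketch, not part of NetworkX itself; the module names listed are the usual import names for the packages described in the sections that follow (for example, PyYAML is imported as ``yaml``), and the list itself is an assumption made for this example::

    # Sketch: report which optional NetworkX dependencies can be imported.
    # The list of module names below is an assumption for illustration only.
    optional = ['numpy', 'scipy', 'matplotlib', 'pygraphviz', 'pydot', 'pyparsing', 'yaml']
    for name in optional:
        try:
            __import__(name)
            print('%s is available' % name)
        except ImportError:
            print('%s is not installed' % name)

Any module reported as missing can be installed with pip as described above.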
NumPy ----- Provides matrix representation of graphs and is used in some graph algorithms for high-performance matrix computations. - Download: http://scipy.org/Download SciPy ----- Provides sparse matrix representation of graphs and many numerical scientific tools. - Download: http://scipy.org/Download Matplotlib ---------- Provides flexible drawing of graphs. - Download: http://matplotlib.sourceforge.net/ GraphViz -------- In conjunction with either - PyGraphviz: http://networkx.lanl.gov/pygraphviz/ or - pydot: http://code.google.com/p/pydot/ provides graph drawing and graph layout algorithms. - Download: http://graphviz.org/ Pyparsing --------- http://pyparsing.wikispaces.com/ Required for pydot, GML file reading. PyYAML ------ http://pyyaml.org/ Required for YAML format reading and writing. Other packages --------------- These are extra packages you may consider using with NetworkX - IPython, interactive Python shell, http://ipython.scipy.org/ networkx-1.8.1/doc/source/conf.py0000664000175000017500000001307712177456333016651 0ustar aricaric00000000000000# -*- coding: utf-8 -*- # # Sphinx documentation build configuration file, created by # sphinx-quickstart.py on Sat Mar 8 21:47:50 2008. # # This file is execfile()d with the current directory set to its containing dir. # # The contents of this file are pickled, so don't put values in the namespace # that aren't pickleable (module imports are okay, they're removed automatically). # # All configuration values have a default value; values that are commented out # serve to show the default value. import sys, os, re # Check Sphinx version import sphinx if sphinx.__version__ < "1.0.1": raise RuntimeError("Sphinx 1.0.1 or newer required") # If your extensions are in another directory, add it here. sys.path.append(os.path.abspath('../sphinxext')) # General configuration # --------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.addons.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.pngmath', 'sphinx.ext.viewcode', # 'sphinx.ext.mathjax', 'numpydoc', 'sphinx.ext.coverage', 'sphinx.ext.autosummary','sphinx.ext.todo','sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'customroles'] # generate autosummary pages autosummary_generate=True # Add any paths that contain templates here, relative to this directory. templates_path = ['templates','../rst_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General substitutions. project = 'NetworkX' copyright = '2013, NetworkX Developers' # The default replacements for |version| and |release|, also used in various # other places throughout the built documents. # # The short X.Y version. import networkx version = networkx.__version__ # The full version, including dev info release = networkx.__version__.replace('_','') # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. # unused_docs = ['reference/pdf_reference'] # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). 
add_module_names = False # show_authors = True # The name of the Pygments (syntax highlighting) style to use. #pygments_style = 'friendly' pygments_style = 'sphinx' # A list of prefixs that are ignored when creating the module index. (new in Sphinx 0.6) modindex_common_prefix=['networkx.'] doctest_global_setup="import networkx as nx" # Options for HTML output # ----------------------- html_theme = "sphinxdoc" #html_theme_options = { # "rightsidebar": "true", # "relbarbgcolor: "black" #} # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. html_style = 'networkx.css' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Content template for the index page. #html_index = 'index.html' html_index = 'contents.html' # Custom sidebar templates, maps page names to templates. #html_sidebars = {'index': 'indexsidebar.html'} # Additional templates that should be rendered to pages, maps page names to # templates. #html_additional_pages = {'index': 'index.html','gallery':'gallery.html'} html_additional_pages = {'gallery':'gallery.html'} # If true, the reST sources are included in the HTML build as _sources/. html_copy_source = False html_use_opensearch = 'http://networkx.github.io' # Output file base name for HTML help builder. htmlhelp_basename = 'NetworkX' pngmath_use_preview = True # Options for LaTeX output # ------------------------ # The paper size ('letter' or 'a4'). latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class [howto/manual]). latex_documents = [('tutorial/index', 'networkx_tutorial.tex', 'NetworkX Tutorial', 'Aric Hagberg, Dan Schult, Pieter Swart', 'howto', 1), ('reference/pdf_reference', 'networkx_reference.tex', 'NetworkX Reference', 'Aric Hagberg, Dan Schult, Pieter Swart', 'manual', 1)] #latex_appendices = ['installing']#,'legal'],'citing','credits','history'] #latex_appendices = ['credits'] # Intersphinx mapping intersphinx_mapping = {'http://docs.python.org/': None, 'http://docs.scipy.org/doc/numpy/': None, } # For trac custom roles default_role = 'math' trac_url = 'https://networkx.lanl.gov/trac/' #mathjax_path = 'http://mathjax.connectmv.com/MathJax.js' networkx-1.8.1/doc/source/index.rst0000664000175000017500000000122212177456333017200 0ustar aricaric00000000000000.. _contents: NetworkX documentation ====================== .. only:: html :Release: |version| :Date: |today| Tutorial :download:`[PDF] ` Reference :download:`[PDF] ` Tutorial+Reference :download:`[HTML zip] ` .. toctree:: :maxdepth: 2 overview download install tutorial/index reference/index test developer/index reference/history bibliography examples/index .. 
only:: html - `Gallery `_ Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` * :ref:`glossary` networkx-1.8.1/doc/source/tutorial/0000775000175000017500000000000012177457361017207 5ustar aricaric00000000000000networkx-1.8.1/doc/source/tutorial/tutorial.rst0000664000175000017500000003246212177456333021611 0ustar aricaric00000000000000.. -*- coding: utf-8 -*- .. currentmodule:: networkx Start here to begin working with NetworkX. Creating a graph ---------------- Create an empty graph with no nodes and no edges. >>> import networkx as nx >>> G=nx.Graph() By definition, a :class:`Graph` is a collection of nodes (vertices) along with identified pairs of nodes (called edges, links, etc). In NetworkX, nodes can be any hashable object e.g. a text string, an image, an XML object, another Graph, a customized node object, etc. (Note: Python's None object should not be used as a node as it determines whether optional function arguments have been assigned in many functions.) Nodes ----- The graph G can be grown in several ways. NetworkX includes many graph generator functions and facilities to read and write graphs in many formats. To get started though we'll look at simple manipulations. You can add one node at a time, >>> G.add_node(1) add a list of nodes, >>> G.add_nodes_from([2,3]) or add any :term:`nbunch` of nodes. An *nbunch* is any iterable container of nodes that is not itself a node in the graph. (e.g. a list, set, graph, file, etc..) >>> H=nx.path_graph(10) >>> G.add_nodes_from(H) Note that G now contains the nodes of H as nodes of G. In contrast, you could use the graph H as a node in G. >>> G.add_node(H) The graph G now contains H as a node. This flexibility is very powerful as it allows graphs of graphs, graphs of files, graphs of functions and much more. It is worth thinking about how to structure your application so that the nodes are useful entities. Of course you can always use a unique identifier in G and have a separate dictionary keyed by identifier to the node information if you prefer. (Note: You should not change the node object if the hash depends on its contents.) Edges ----- G can also be grown by adding one edge at a time, >>> G.add_edge(1,2) >>> e=(2,3) >>> G.add_edge(*e) # unpack edge tuple* by adding a list of edges, >>> G.add_edges_from([(1,2),(1,3)]) or by adding any :term:`ebunch` of edges. An *ebunch* is any iterable container of edge-tuples. An edge-tuple can be a 2-tuple of nodes or a 3-tuple with 2 nodes followed by an edge attribute dictionary, e.g. (2,3,{'weight':3.1415}). Edge attributes are discussed further below >>> G.add_edges_from(H.edges()) One can demolish the graph in a similar fashion; using :meth:`Graph.remove_node`, :meth:`Graph.remove_nodes_from`, :meth:`Graph.remove_edge` and :meth:`Graph.remove_edges_from`, e.g. >>> G.remove_node(H) There are no complaints when adding existing nodes or edges. For example, after removing all nodes and edges, >>> G.clear() we add new nodes/edges and NetworkX quietly ignores any that are already present. 
>>> G.add_edges_from([(1,2),(1,3)]) >>> G.add_node(1) >>> G.add_edge(1,2) >>> G.add_node("spam") # adds node "spam" >>> G.add_nodes_from("spam") # adds 4 nodes: 's', 'p', 'a', 'm' At this stage the graph G consists of 8 nodes and 2 edges, as can be seen by: >>> G.number_of_nodes() 8 >>> G.number_of_edges() 2 We can examine them with >>> G.nodes() ['a', 1, 2, 3, 'spam', 'm', 'p', 's'] >>> G.edges() [(1, 2), (1, 3)] >>> G.neighbors(1) [2, 3] Removing nodes or edges has similar syntax to adding: >>> G.remove_nodes_from("spam") >>> G.nodes() [1, 2, 3, 'spam'] >>> G.remove_edge(1,3) When creating a graph structure (by instantiating one of the graph classes you can specify data in several formats. >>> H=nx.DiGraph(G) # create a DiGraph using the connections from G >>> H.edges() [(1, 2), (2, 1)] >>> edgelist=[(0,1),(1,2),(2,3)] >>> H=nx.Graph(edgelist) What to use as nodes and edges ------------------------------ You might notice that nodes and edges are not specified as NetworkX objects. This leaves you free to use meaningful items as nodes and edges. The most common choices are numbers or strings, but a node can be any hashable object (except None), and an edge can be associated with any object x using G.add_edge(n1,n2,object=x). As an example, n1 and n2 could be protein objects from the RCSB Protein Data Bank, and x could refer to an XML record of publications detailing experimental observations of their interaction. We have found this power quite useful, but its abuse can lead to unexpected surprises unless one is familiar with Python. If in doubt, consider using :func:`convert_node_labels_to_integers` to obtain a more traditional graph with integer labels. Accessing edges --------------- In addition to the methods :meth:`Graph.nodes`, :meth:`Graph.edges`, and :meth:`Graph.neighbors`, iterator versions (e.g. :meth:`Graph.edges_iter`) can save you from creating large lists when you are just going to iterate through them anyway. Fast direct access to the graph data structure is also possible using subscript notation. .. Warning:: Do not change the returned dict--it is part of the graph data structure and direct manipulation may leave the graph in an inconsistent state. >>> G[1] # Warning: do not change the resulting dict {2: {}} >>> G[1][2] {} You can safely set the attributes of an edge using subscript notation if the edge already exists. >>> G.add_edge(1,3) >>> G[1][3]['color']='blue' Fast examination of all edges is achieved using adjacency iterators. Note that for undirected graphs this actually looks at each edge twice. >>> FG=nx.Graph() >>> FG.add_weighted_edges_from([(1,2,0.125),(1,3,0.75),(2,4,1.2),(3,4,0.375)]) >>> for n,nbrs in FG.adjacency_iter(): ... for nbr,eattr in nbrs.items(): ... data=eattr['weight'] ... if data<0.5: print('(%d, %d, %.3f)' % (n,nbr,data)) (1, 2, 0.125) (2, 1, 0.125) (3, 4, 0.375) (4, 3, 0.375) Adding attributes to graphs, nodes, and edges --------------------------------------------- Attributes such as weights, labels, colors, or whatever Python object you like, can be attached to graphs, nodes, or edges. Each graph, node, and edge can hold key/value attribute pairs in an associated attribute dictionary (the keys must be hashable). By default these are empty, but attributes can be added or changed using add_edge, add_node or direct manipulation of the attribute dictionaries named G.graph, G.node and G.edge for a graph G. 
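Before looking at each kind of attribute in turn, the following minimal sketch (the attribute names are arbitrary, chosen only for illustration) shows where each kind of attribute ends up; the subsections below cover the same machinery in more detail.

>>> G = nx.Graph(title='example')
>>> G.add_node(1, color='red')
>>> G.add_edge(1, 2, weight=3)
>>> G.graph
{'title': 'example'}
>>> G.node[1]
{'color': 'red'}
>>> G.edge[1][2]
{'weight': 3}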
Graph attributes ~~~~~~~~~~~~~~~~ Assign graph attributes when creating a new graph >>> G = nx.Graph(day="Friday") >>> G.graph {'day': 'Friday'} Or you can modify attributes later >>> G.graph['day']='Monday' >>> G.graph {'day': 'Monday'} Node attributes ~~~~~~~~~~~~~~~ Add node attributes using add_node(), add_nodes_from() or G.node >>> G.add_node(1, time='5pm') >>> G.add_nodes_from([3], time='2pm') >>> G.node[1] {'time': '5pm'} >>> G.node[1]['room'] = 714 >>> G.nodes(data=True) [(1, {'room': 714, 'time': '5pm'}), (3, {'time': '2pm'})] Note that adding a node to G.node does not add it to the graph, use G.add_node() to add new nodes. Edge Attributes ~~~~~~~~~~~~~~~ Add edge attributes using add_edge(), add_edges_from(), subscript notation, or G.edge. >>> G.add_edge(1, 2, weight=4.7 ) >>> G.add_edges_from([(3,4),(4,5)], color='red') >>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})]) >>> G[1][2]['weight'] = 4.7 >>> G.edge[1][2]['weight'] = 4 The special attribute 'weight' should be numeric and holds values used by algorithms requiring weighted edges. Directed graphs --------------- The DiGraph class provides additional methods specific to directed edges, e.g. :meth:`DiGraph.out_edges`, :meth:`DiGraph.in_degree`, :meth:`DiGraph.predecessors`, :meth:`DiGraph.successors` etc. To allow algorithms to work with both classes easily, the directed versions of neighbors() and degree() are equivalent to successors() and the sum of in_degree() and out_degree() respectively even though that may feel inconsistent at times. >>> DG=nx.DiGraph() >>> DG.add_weighted_edges_from([(1,2,0.5), (3,1,0.75)]) >>> DG.out_degree(1,weight='weight') 0.5 >>> DG.degree(1,weight='weight') 1.25 >>> DG.successors(1) [2] >>> DG.neighbors(1) [2] Some algorithms work only for directed graphs and others are not well defined for directed graphs. Indeed the tendency to lump directed and undirected graphs together is dangerous. If you want to treat a directed graph as undirected for some measurement you should probably convert it using :meth:`Graph.to_undirected` or with >>> H= nx.Graph(G) # convert H to undirected graph Multigraphs ----------- NetworkX provides classes for graphs which allow multiple edges between any pair of nodes. The :class:`MultiGraph` and :class:`MultiDiGraph` classes allow you to add the same edge twice, possibly with different edge data. This can be powerful for some applications, but many algorithms are not well defined on such graphs. Shortest path is one example. Where results are well defined, e.g. :meth:`MultiGraph.degree` we provide the function. Otherwise you should convert to a standard graph in a way that makes the measurement well defined. >>> MG=nx.MultiGraph() >>> MG.add_weighted_edges_from([(1,2,.5), (1,2,.75), (2,3,.5)]) >>> MG.degree(weight='weight') {1: 1.25, 2: 1.75, 3: 0.5} >>> GG=nx.Graph() >>> for n,nbrs in MG.adjacency_iter(): ... for nbr,edict in nbrs.items(): ... minvalue=min([d['weight'] for d in edict.values()]) ... GG.add_edge(n,nbr, weight = minvalue) ... >>> nx.shortest_path(GG,1,3) [1, 2, 3] Graph generators and graph operations ------------------------------------- In addition to constructing graphs node-by-node or edge-by-edge, they can also be generated by 1. 
Applying classic graph operations, such as:: subgraph(G, nbunch) - induce subgraph of G on nodes in nbunch union(G1,G2) - graph union disjoint_union(G1,G2) - graph union assuming all nodes are different cartesian_product(G1,G2) - return Cartesian product graph compose(G1,G2) - combine graphs identifying nodes common to both complement(G) - graph complement create_empty_copy(G) - return an empty copy of the same graph class convert_to_undirected(G) - return an undirected representation of G convert_to_directed(G) - return a directed representation of G 2. Using a call to one of the classic small graphs, e.g. >>> petersen=nx.petersen_graph() >>> tutte=nx.tutte_graph() >>> maze=nx.sedgewick_maze_graph() >>> tet=nx.tetrahedral_graph() 3. Using a (constructive) generator for a classic graph, e.g. >>> K_5=nx.complete_graph(5) >>> K_3_5=nx.complete_bipartite_graph(3,5) >>> barbell=nx.barbell_graph(10,10) >>> lollipop=nx.lollipop_graph(10,20) 4. Using a stochastic graph generator, e.g. >>> er=nx.erdos_renyi_graph(100,0.15) >>> ws=nx.watts_strogatz_graph(30,3,0.1) >>> ba=nx.barabasi_albert_graph(100,5) >>> red=nx.random_lobster(100,0.9,0.9) 5. Reading a graph stored in a file using common graph formats, such as edge lists, adjacency lists, GML, GraphML, pickle, LEDA and others. >>> nx.write_gml(red,"path.to.file") >>> mygraph=nx.read_gml("path.to.file") Details on graph formats: :doc:`/reference/readwrite` Details on graph generator functions: :doc:`/reference/generators` Analyzing graphs ---------------- The structure of G can be analyzed using various graph-theoretic functions such as: >>> G=nx.Graph() >>> G.add_edges_from([(1,2),(1,3)]) >>> G.add_node("spam") # adds node "spam" >>> nx.connected_components(G) [[1, 2, 3], ['spam']] >>> sorted(nx.degree(G).values()) [0, 1, 1, 2] >>> nx.clustering(G) {1: 0.0, 2: 0.0, 3: 0.0, 'spam': 0.0} Functions that return node properties return dictionaries keyed by node label. >>> nx.degree(G) {1: 2, 2: 1, 3: 1, 'spam': 0} For values of specific nodes, you can provide a single node or an nbunch of nodes as argument. If a single node is specified, then a single value is returned. If an nbunch is specified, then the function will return a dictionary. >>> nx.degree(G,1) 2 >>> G.degree(1) 2 >>> G.degree([1,2]) {1: 2, 2: 1} >>> sorted(G.degree([1,2]).values()) [1, 2] >>> sorted(G.degree().values()) [0, 1, 1, 2] Details on graph algorithms supported: :doc:`/reference/algorithms` Drawing graphs -------------- NetworkX is not primarily a graph drawing package but basic drawing with Matplotlib as well as an interface to use the open source Graphviz software package are included. These are part of the networkx.drawing package and will be imported if possible. See :doc:`/reference/drawing` for details. Note that the drawing package in NetworkX is not yet compatible with Python versions 3.0 and above. First import Matplotlib's plot interface (pylab works too) >>> import matplotlib.pyplot as plt You may find it useful to interactively test code using "ipython -pylab", which combines the power of ipython and matplotlib and provides a convenient interactive mode. To test if the import of networkx.drawing was successful draw G using one of >>> nx.draw(G) >>> nx.draw_random(G) >>> nx.draw_circular(G) >>> nx.draw_spectral(G) when drawing to an interactive display. 
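If you want more control over node placement, one option (shown here only as a minimal sketch; the layout function and keyword arguments are illustrative choices, not the only way) is to compute a layout explicitly and pass the resulting positions to the drawing call::

    import networkx as nx
    import matplotlib.pyplot as plt

    G = nx.petersen_graph()
    pos = nx.spring_layout(G)   # force-directed layout; a dict of positions keyed by node
    nx.draw(G, pos, node_color='yellow', with_labels=True)
    plt.show()                  # needed when matplotlib is not in interactive mode (see below)
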
Note that you may need to issue a Matplotlib >>> plt.show() command if you are not using matplotlib in interactive mode: (See `Matplotlib FAQ `_ ) To save drawings to a file, use, for example >>> nx.draw(G) >>> plt.savefig("path.png") writes to the file "path.png" in the local directory. If Graphviz and PyGraphviz, or pydot, are available on your system, you can also use >>> nx.draw_graphviz(G) >>> nx.write_dot(G,'file.dot') Details on drawing graphs: :doc:`/reference/drawing` networkx-1.8.1/doc/source/tutorial/index.rst0000664000175000017500000000062312177456333021047 0ustar aricaric00000000000000.. -*- coding: utf-8 -*- ******** Tutorial ******** .. toctree:: :maxdepth: 2 tutorial **What Next** Now that you have an idea of what the NetworkX package provides, you should investigate the parts of the package most useful for you. :doc:`Reference Section` provides details on NetworkX. :doc:`/examples/index` provides some example programs written using NetworkX. networkx-1.8.1/doc/source/download.rst0000664000175000017500000000075412177456333017711 0ustar aricaric00000000000000-------- Download -------- Software ~~~~~~~~ Source and binary releases: http://cheeseshop.python.org/pypi/networkx/ Github (latest development): https://github.com/networkx/networkx/ Documentation ~~~~~~~~~~~~~ *PDF* http://networkx.github.io/documentation/latest/_downloads/networkx_tutorial.pdf http://networkx.github.io/documentation/latest/_downloads/networkx_reference.pdf *HTML in zip file* http://networkx.github.io/documentation/latest/_downloads/networkx-documentation.zip networkx-1.8.1/doc/gitwash_dumper.py0000664000175000017500000001725512177456333017450 0ustar aricaric00000000000000#!/usr/bin/env python ''' Checkout gitwash repo into directory and do search replace on name ''' import os from os.path import join as pjoin import shutil import sys import re import glob import fnmatch import tempfile from subprocess import call from optparse import OptionParser verbose = False def clone_repo(url, branch): cwd = os.getcwd() tmpdir = tempfile.mkdtemp() try: cmd = 'git clone %s %s' % (url, tmpdir) call(cmd, shell=True) os.chdir(tmpdir) cmd = 'git checkout %s' % branch call(cmd, shell=True) except: shutil.rmtree(tmpdir) raise finally: os.chdir(cwd) return tmpdir def cp_files(in_path, globs, out_path): try: os.makedirs(out_path) except OSError: pass out_fnames = [] for in_glob in globs: in_glob_path = pjoin(in_path, in_glob) for in_fname in glob.glob(in_glob_path): out_fname = in_fname.replace(in_path, out_path) pth, _ = os.path.split(out_fname) if not os.path.isdir(pth): os.makedirs(pth) shutil.copyfile(in_fname, out_fname) out_fnames.append(out_fname) return out_fnames def filename_search_replace(sr_pairs, filename, backup=False): ''' Search and replace for expressions in files ''' in_txt = open(filename, 'rt').read(-1) out_txt = in_txt[:] for in_exp, out_exp in sr_pairs: in_exp = re.compile(in_exp) out_txt = in_exp.sub(out_exp, out_txt) if in_txt == out_txt: return False open(filename, 'wt').write(out_txt) if backup: open(filename + '.bak', 'wt').write(in_txt) return True def copy_replace(replace_pairs, repo_path, out_path, cp_globs=('*',), rep_globs=('*',), renames = ()): out_fnames = cp_files(repo_path, cp_globs, out_path) renames = [(re.compile(in_exp), out_exp) for in_exp, out_exp in renames] fnames = [] for rep_glob in rep_globs: fnames += fnmatch.filter(out_fnames, rep_glob) if verbose: print '\n'.join(fnames) for fname in fnames: filename_search_replace(replace_pairs, fname, False) for in_exp, out_exp in 
renames: new_fname, n = in_exp.subn(out_exp, fname) if n: os.rename(fname, new_fname) break def make_link_targets(proj_name, user_name, repo_name, known_link_fname, out_link_fname, url=None, ml_url=None): """ Check and make link targets If url is None or ml_url is None, check if there are links present for these in `known_link_fname`. If not, raise error. The check is: Look for a target `proj_name`. Look for a target `proj_name` + ' mailing list' Also, look for a target `proj_name` + 'github'. If this exists, don't write this target into the new file below. If we are writing any of the url, ml_url, or github address, then write new file with these links, of form: .. _`proj_name` .. _`proj_name`: url .. _`proj_name` mailing list: url """ link_contents = open(known_link_fname, 'rt').readlines() have_url = not url is None have_ml_url = not ml_url is None have_gh_url = None for line in link_contents: if not have_url: match = re.match(r'..\s+_`%s`:\s+' % proj_name, line) if match: have_url = True if not have_ml_url: match = re.match(r'..\s+_`%s mailing list`:\s+' % proj_name, line) if match: have_ml_url = True if not have_gh_url: match = re.match(r'..\s+_`%s github`:\s+' % proj_name, line) if match: have_gh_url = True if not have_url or not have_ml_url: raise RuntimeError('Need command line or known project ' 'and / or mailing list URLs') lines = [] if not url is None: lines.append('.. _`%s`: %s\n' % (proj_name, url)) if not have_gh_url: gh_url = 'http://github.com/%s/%s\n' % (user_name, repo_name) lines.append('.. _`%s github`: %s\n' % (proj_name, gh_url)) if not ml_url is None: lines.append('.. _`%s mailing list`: %s\n' % (proj_name, ml_url)) if len(lines) == 0: # Nothing to do return # A neat little header line lines = ['.. %s\n' % proj_name] + lines out_links = open(out_link_fname, 'wt') out_links.writelines(lines) out_links.close() USAGE = ''' If not set with options, the repository name is the same as the If not set with options, the main github user is the same as the repository name.''' GITWASH_CENTRAL = 'git://github.com/matthew-brett/gitwash.git' GITWASH_BRANCH = 'master' def main(): parser = OptionParser() parser.set_usage(parser.get_usage().strip() + USAGE) parser.add_option("--repo-name", dest="repo_name", help="repository name - e.g. 
nitime", metavar="REPO_NAME") parser.add_option("--github-user", dest="main_gh_user", help="github username for main repo - e.g fperez", metavar="MAIN_GH_USER") parser.add_option("--gitwash-url", dest="gitwash_url", help="URL to gitwash repository - default %s" % GITWASH_CENTRAL, default=GITWASH_CENTRAL, metavar="GITWASH_URL") parser.add_option("--gitwash-branch", dest="gitwash_branch", help="branch in gitwash repository - default %s" % GITWASH_BRANCH, default=GITWASH_BRANCH, metavar="GITWASH_BRANCH") parser.add_option("--source-suffix", dest="source_suffix", help="suffix of ReST source files - default '.rst'", default='.rst', metavar="SOURCE_SUFFIX") parser.add_option("--project-url", dest="project_url", help="URL for project web pages", default=None, metavar="PROJECT_URL") parser.add_option("--project-ml-url", dest="project_ml_url", help="URL for project mailing list", default=None, metavar="PROJECT_ML_URL") (options, args) = parser.parse_args() if len(args) < 2: parser.print_help() sys.exit() out_path, project_name = args if options.repo_name is None: options.repo_name = project_name if options.main_gh_user is None: options.main_gh_user = options.repo_name repo_path = clone_repo(options.gitwash_url, options.gitwash_branch) try: copy_replace((('PROJECTNAME', project_name), ('REPONAME', options.repo_name), ('MAIN_GH_USER', options.main_gh_user)), repo_path, out_path, cp_globs=(pjoin('gitwash', '*'),), rep_globs=('*.rst',), renames=(('\.rst$', options.source_suffix),)) make_link_targets(project_name, options.main_gh_user, options.repo_name, pjoin(out_path, 'gitwash', 'known_projects.inc'), pjoin(out_path, 'gitwash', 'this_project.inc'), options.project_url, options.project_ml_url) finally: shutil.rmtree(repo_path) if __name__ == '__main__': main() networkx-1.8.1/doc/gh-pages.py0000775000175000017500000001034512177456333016115 0ustar aricaric00000000000000#!/usr/bin/env python """Script to commit the doc build outputs into the github-pages repo. Use: gh-pages.py [tag] If no tag is given, the current output of 'git describe' is used. If given, that is how the resulting directory will be named. In practice, you should use either actual clean tags from a current build or something like 'current' as a stable URL for the most current version""" # Borrowed from IPython. #----------------------------------------------------------------------------- # Imports #----------------------------------------------------------------------------- import os import re import shutil import sys from os import chdir as cd from os.path import join as pjoin from subprocess import Popen, PIPE, CalledProcessError, check_call #----------------------------------------------------------------------------- # Globals #----------------------------------------------------------------------------- pages_dir = 'gh-pages' html_dir = 'build/dist' pdf_dir = 'build/latex' pages_repo = 'git@github.com:networkx/documentation.git' #----------------------------------------------------------------------------- # Functions #----------------------------------------------------------------------------- def sh(cmd): """Execute command in a subshell, return status code.""" return check_call(cmd, shell=True) def sh2(cmd): """Execute command in a subshell, return stdout. 
Stderr is unbuffered from the subshell.x""" p = Popen(cmd, stdout=PIPE, shell=True) out = p.communicate()[0] retcode = p.returncode if retcode: raise CalledProcessError(retcode, cmd) else: return out.rstrip() def sh3(cmd): """Execute command in a subshell, return stdout, stderr If anything appears in stderr, print it out to sys.stderr""" p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True) out, err = p.communicate() retcode = p.returncode if retcode: raise CalledProcessError(retcode, cmd) else: return out.rstrip(), err.rstrip() def init_repo(path): """clone the gh-pages repo if we haven't already.""" sh("git clone %s %s"%(pages_repo, path)) here = os.getcwdu() cd(path) sh('git checkout gh-pages') cd(here) #----------------------------------------------------------------------------- # Script starts #----------------------------------------------------------------------------- if __name__ == '__main__': # The tag can be given as a positional argument try: tag = sys.argv[1] except IndexError: try: tag = sh2('git describe --exact-match') except CalledProcessError: print("using development as label") tag = "development" # Fallback startdir = os.getcwdu() if not os.path.exists(pages_dir): # init the repo init_repo(pages_dir) else: # ensure up-to-date before operating cd(pages_dir) sh('git checkout gh-pages') sh('git pull') cd(startdir) dest = pjoin(pages_dir, tag) # don't `make html` here, because gh-pages already depends on html in Makefile # sh('make html') if tag != 'dev': # only build pdf for non-dev targets #sh2('make pdf') pass # This is pretty unforgiving: we unconditionally nuke the destination # directory, and then copy the html tree in there shutil.rmtree(dest, ignore_errors=True) shutil.copytree(html_dir, dest) if tag != 'dev': #shutil.copy(pjoin(pdf_dir, 'ipython.pdf'), pjoin(dest, 'ipython.pdf')) pass try: cd(pages_dir) status = sh2('git status | head -1') branch = re.match('\# On branch (.*)$', status).group(1) if branch != 'gh-pages': e = 'On %r, git branch is %r, MUST be "gh-pages"' % (pages_dir, branch) raise RuntimeError(e) sh('git add -A %s' % tag) sh('git commit -m"Updated doc release: %s"' % tag) print print 'Most recent 3 commits:' sys.stdout.flush() sh('git --no-pager log --oneline HEAD~3..') finally: cd(startdir) print print 'Now verify the build in: %r' % dest print "If everything looks good, 'git push'" networkx-1.8.1/doc/rst_templates/0000775000175000017500000000000012177457361016732 5ustar aricaric00000000000000networkx-1.8.1/doc/rst_templates/autosummary/0000775000175000017500000000000012177457361021320 5ustar aricaric00000000000000networkx-1.8.1/doc/rst_templates/autosummary/function.rst0000664000175000017500000000013612177456333023675 0ustar aricaric00000000000000{{ name }} {{ underline }} .. currentmodule:: {{ module }} .. autofunction:: {{ objname }} networkx-1.8.1/doc/rst_templates/autosummary/base.rst0000664000175000017500000000014212177456333022757 0ustar aricaric00000000000000{{ name }} {{ underline }} .. currentmodule:: {{ module }} .. auto{{ objtype }}:: {{ objname }} networkx-1.8.1/doc/rst_templates/autosummary/class.rst0000664000175000017500000000101312177456333023150 0ustar aricaric00000000000000{{ name }} {{ underline }} .. currentmodule:: {{ module }} .. autoclass:: {{ objname }} {% block methods %} .. automethod:: __init__ {% if methods %} .. rubric:: Methods .. autosummary:: {% for item in methods %} ~{{ name }}.{{ item }} {%- endfor %} {% endif %} {% endblock %} {% block attributes %} {% if attributes %} .. rubric:: Attributes .. 
autosummary:: {% for item in attributes %} ~{{ name }}.{{ item }} {%- endfor %} {% endif %} {% endblock %} networkx-1.8.1/doc/rst_templates/autosummary/module.rst0000664000175000017500000000116312177456333023336 0ustar aricaric00000000000000{{ name }} {{ underline }} .. automodule:: {{ name }} {% block functions %} {% if functions %} .. rubric:: Functions .. autosummary:: {% for item in functions %} {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block classes %} {% if classes %} .. rubric:: Classes .. autosummary:: {% for item in classes %} {{ item }} {%- endfor %} {% endif %} {% endblock %} {% block exceptions %} {% if exceptions %} .. rubric:: Exceptions .. autosummary:: {% for item in exceptions %} {{ item }} {%- endfor %} {% endif %} {% endblock %} networkx-1.8.1/doc/Makefile0000664000175000017500000001030712177456333015503 0ustar aricaric00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest epub help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " epub to make an epub" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf build/* source/reference/generated/* source/examples/* source/static/examples doc/source/*.pdf doc/source/*.zip -rm -rf ../examples/*/*.png generate: build/generate-stamp build/generate-stamp: $(wildcard source/reference/*.rst) mkdir -p build ./make_gallery.py ./make_examples_rst.py ../examples source touch build/generate-stamp dist: html test -d build/latex || make latex make -C build/latex all-pdf -rm -rf build/dist (cd build/html; cp -r . ../../build/dist) -rm -f build/dist/_downloads/* (cd build/html && zip -9r ../dist/_downloads/networkx-documentation.zip .) cp build/latex/*.pdf build/dist/_downloads # (cd build/dist && ln -s _downloads/* .) (cd build/dist && tar czf ../dist.tar.gz .) html: generate touch source/networkx_tutorial.pdf touch source/networkx_reference.pdf touch source/networkx-documentation.zip $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html @echo @echo "Build finished. The HTML pages are in build/html." dirhtml: generate $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) build/dirhtml @echo @echo "Build finished. The HTML pages are in build/dirhtml." pickle: generate $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle @echo @echo "Build finished; now you can process the pickle files." json: generate $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: generate $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in build/htmlhelp." 
qthelp: generate $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) build/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in build/qthelp, like this:" @echo "# qcollectiongenerator build/qthelp/test.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile build/qthelp/test.qhc" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) build/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: generate $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex @echo @echo "Build finished; the LaTeX files are in build/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes @echo @echo "The overview file is in build/changes." linkcheck: generate $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in build/linkcheck/output.txt." doctest: generate $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) build/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in build/doctest/output.txt." gh-pages: clean dist python gh-pages.py $(tag) gitwash-update: python gitwash_dumper.py source/developer networkx \ --project-url=http://networkx.github.io \ --project-ml-url=http://groups.google.com/group/networkx-discuss/ networkx-1.8.1/MANIFEST.in0000664000175000017500000000141112177456333015030 0ustar aricaric00000000000000include MANIFEST.in include setup_egg.py include setup.py include INSTALL.txt include LICENSE.txt include README.txt recursive-include examples *.py *.edgelist *.mbox *.gz *.bz2 *.zip recursive-include doc *.py *.rst Makefile *.html *.png *.txt *.css include scripts/* include networkx/tests/*.txt include networkx/tests/*.py include networkx/*/tests/*.txt include networkx/*/tests/*.py include networkx/*/*/tests/*.txt include networkx/*/*/tests/*.py include networkx/*/*/tests/*.A99 include networkx/*/*/tests/*.B99 include networkx/external/decorator/*/*.py global-exclude *~ global-exclude *.pyc global-exclude .svn prune doc/build prune doc/source/reference/generated prune doc/source/examples prune doc/source/static/examples prune doc/source/templates/gallery.html networkx-1.8.1/README.txt0000664000175000017500000000060512177456333014774 0ustar aricaric00000000000000NetworkX NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. Copyright (C) 2004-2013 NetworkX Developers Aric Hagberg Dan Schult Pieter Swart Distributed with a BSD license; see LICENSE.txt. See http://networkx.lanl.gov/ for more information. networkx-1.8.1/INSTALL.txt0000664000175000017500000000010512177456333015140 0ustar aricaric00000000000000See doc/source/install.rst or http://networkx.lanl.gov/install.html networkx-1.8.1/PKG-INFO0000664000175000017500000000272312177457361014400 0ustar aricaric00000000000000Metadata-Version: 1.1 Name: networkx Version: 1.8.1 Summary: Python package for creating and manipulating graphs and networks Home-page: http://networkx.lanl.gov/ Author: NetworkX Developers Author-email: networkx-discuss@googlegroups.com License: BSD Download-URL: http://networkx.lanl.gov/download/networkx Description: NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks. 
Keywords: Networks,Graph Theory,Mathematics,network,graph,discrete mathematics,math Platform: Linux Platform: Mac OSX Platform: Windows Platform: Unix Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: BSD License Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.1 Classifier: Programming Language :: Python :: 3.2 Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Topic :: Scientific/Engineering :: Bio-Informatics Classifier: Topic :: Scientific/Engineering :: Information Analysis Classifier: Topic :: Scientific/Engineering :: Mathematics Classifier: Topic :: Scientific/Engineering :: Physics networkx-1.8.1/LICENSE.txt0000664000175000017500000000335412177456333015125 0ustar aricaric00000000000000License ======= NetworkX is distributed with the BSD license. :: Copyright (C) 2004-2011, NetworkX Developers Aric Hagberg Dan Schult Pieter Swart All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the NetworkX Developers nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

    You can get the latest source from http://networkx.lanl.gov/hg/networkx/ or look for released versions in the Python Package Index.