pytools-2015.1.6/0000755000175000017500000000000012613227172014536 5ustar andreasandreas00000000000000pytools-2015.1.6/pytools/0000755000175000017500000000000012613227172016247 5ustar andreasandreas00000000000000pytools-2015.1.6/pytools/__init__.py0000644000175000017500000013747312611471666020406 0ustar andreasandreas00000000000000from __future__ import division, absolute_import, print_function __copyright__ = "Copyright (C) 2009-2013 Andreas Kloeckner" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import operator import sys from pytools.decorator import decorator import six from six.moves import range, zip, intern, input from functools import reduce try: decorator_module = __import__("decorator", level=0) except TypeError: # this must be Python 2.4 my_decorator = decorator except ImportError: my_decorator = decorator else: my_decorator = decorator_module.decorator # {{{ math -------------------------------------------------------------------- def delta(x, y): if x == y: return 1 else: return 0 def levi_civita(tup): """Compute an entry of the Levi-Civita tensor for the indices *tuple*.""" if len(tup) == 2: i, j = tup return j-i if len(tup) == 3: i, j, k = tup return (j-i)*(k-i)*(k-j)/2 else: raise NotImplementedError def factorial(n): from operator import mul assert n == int(n) return reduce(mul, (i for i in range(1, n+1)), 1) def perm(n, k): """Return P(n, k), the number of permutations of length k drawn from n choices. """ result = 1 assert k > 0 while k: result *= n n -= 1 k -= 1 return result def comb(n, k): """Return C(n, k), the number of combinations (subsets) of length k drawn from n choices. """ return perm(n, k)//factorial(k) def norm_1(iterable): return sum(abs(x) for x in iterable) def norm_2(iterable): return sum(x**2 for x in iterable)**0.5 def norm_inf(iterable): return max(abs(x) for x in iterable) def norm_p(iterable, p): return sum(i**p for i in iterable)**(1/p) class Norm(object): def __init__(self, p): self.p = p def __call__(self, iterable): return sum(i**self.p for i in iterable)**(1/self.p) # }}} # {{{ data structures # {{{ record class RecordWithoutPickling(object): """An aggregate of named sub-variables. Assumes that each record sub-type will be individually derived from this class. 
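    A minimal sketch of intended use (``MyRecord`` is a made-up name, shown
    only for illustration; it derives from the pickling-capable ``Record``
    subclass defined below)::

        class MyRecord(Record):
            pass

        rec = MyRecord(x=1, y=2)
        rec.x            # -> 1
        rec.copy(y=5)    # -> MyRecord(x=1, y=5)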
""" __slots__ = [] def __init__(self, valuedict=None, exclude=["self"], **kwargs): assert self.__class__ is not Record try: fields = self.__class__.fields except AttributeError: self.__class__.fields = fields = set() if valuedict is not None: kwargs.update(valuedict) for key, value in six.iteritems(kwargs): if key not in exclude: fields.add(key) setattr(self, key, value) def get_copy_kwargs(self, **kwargs): for f in self.__class__.fields: if f not in kwargs: try: kwargs[f] = getattr(self, f) except AttributeError: pass return kwargs def copy(self, **kwargs): return self.__class__(**self.get_copy_kwargs(**kwargs)) def __repr__(self): return "%s(%s)" % ( self.__class__.__name__, ", ".join("%s=%r" % (fld, getattr(self, fld)) for fld in self.__class__.fields if hasattr(self, fld))) def register_fields(self, new_fields): try: fields = self.__class__.fields except AttributeError: self.__class__.fields = fields = set() fields.update(new_fields) class Record(RecordWithoutPickling): __slots__ = [] def __getstate__(self): return dict( (key, getattr(self, key)) for key in self.__class__.fields if hasattr(self, key)) def __setstate__(self, valuedict): try: fields = self.__class__.fields except AttributeError: self.__class__.fields = fields = set() for key, value in six.iteritems(valuedict): fields.add(key) setattr(self, key, value) def __eq__(self, other): return (self.__class__ == other.__class__ and self.__getstate__() == other.__getstate__()) def __ne__(self, other): return not self.__eq__(other) # }}} class Reference(object): def __init__(self, value): self.value = value def get(self): from warnings import warn warn("Reference.get() is deprecated -- use ref.value instead") return self.value def set(self, value): self.value = value # {{{ dictionary with default class DictionaryWithDefault(object): def __init__(self, default_value_generator, start={}): self._Dictionary = dict(start) self._DefaultGenerator = default_value_generator def __getitem__(self, index): try: return self._Dictionary[index] except KeyError: value = self._DefaultGenerator(index) self._Dictionary[index] = value return value def __setitem__(self, index, value): self._Dictionary[index] = value def __contains__(self, item): return True def iterkeys(self): return six.iterkeys(self._Dictionary) def __iter__(self): return self._Dictionary.__iter__() def iteritems(self): return six.iteritems(self._Dictionary) # }}} class FakeList(object): def __init__(self, f, length): self._Length = length self._Function = f def __len__(self): return self._Length def __getitem__(self, index): try: return [self._Function(i) for i in range(*index.indices(self._Length))] except AttributeError: return self._Function(index) # {{{ dependent dictionary ---------------------------------------------------- class DependentDictionary(object): def __init__(self, f, start={}): self._Function = f self._Dictionary = start.copy() def copy(self): return DependentDictionary(self._Function, self._Dictionary) def __contains__(self, key): try: self[key] return True except KeyError: return False def __getitem__(self, key): try: return self._Dictionary[key] except KeyError: return self._Function(self._Dictionary, key) def __setitem__(self, key, value): self._Dictionary[key] = value def genuineKeys(self): # noqa return list(self._Dictionary.keys()) def iteritems(self): return six.iteritems(self._Dictionary) def iterkeys(self): return six.iterkeys(self._Dictionary) def itervalues(self): return six.itervalues(self._Dictionary) # }}} # }}} # {{{ assertive accessors def 
one(iterable): """Return the first entry of *iterable*. Assert that *iterable* has only that one entry. """ it = iter(iterable) try: v = next(it) except StopIteration: raise ValueError("empty iterable passed to 'one()'") def no_more(): try: next(it) raise ValueError("iterable with more than one entry passed to 'one()'") except StopIteration: return True assert no_more() return v def is_single_valued(iterable, equality_pred=operator.eq): it = iter(iterable) try: first_item = next(it) except StopIteration: raise ValueError("empty iterable passed to 'single_valued()'") for other_item in it: if not equality_pred(other_item, first_item): return False return True all_equal = is_single_valued def all_roughly_equal(iterable, threshold): return is_single_valued(iterable, equality_pred=lambda a, b: abs(a-b) < threshold) def single_valued(iterable, equality_pred=operator.eq): """Return the first entry of *iterable*; Assert that other entries are the same with the first entry of *iterable*. """ it = iter(iterable) try: first_item = next(it) except StopIteration: raise ValueError("empty iterable passed to 'single_valued()'") def others_same(): for other_item in it: if not equality_pred(other_item, first_item): return False return True assert others_same() return first_item # }}} # {{{ memoization / attribute storage def memoize(*args, **kwargs): """Stores previously computed function values in a cache. Two keyword-only arguments are supported: :arg use_kwargs: Allows the caller to use keyword arguments. Defaults to ``False``. Setting this to ``True`` has a non-negligible performance impact. :arg key: A function receiving the same arguments as the decorated function which computes and returns the cache key. """ use_kw = bool(kwargs.pop('use_kwargs', False)) if use_kw: def default_key_func(*inner_args, **inner_kwargs): return inner_args, frozenset(six.iteritems(inner_kwargs)) else: default_key_func = None key_func = kwargs.pop("key", default_key_func) if kwargs: raise TypeError( "memoize received unexpected keyword arguments: %s" % ", ".join(list(kwargs.keys()))) if key_func is not None: @my_decorator def _deco(func, *args, **kwargs): # by Michele Simionato # http://www.phyast.pitt.edu/~micheles/python/ key = key_func(*args, **kwargs) try: return func._memoize_dic[key] except AttributeError: # _memoize_dic doesn't exist yet. result = func(*args, **kwargs) func._memoize_dic = {key: result} return result except KeyError: result = func(*args, **kwargs) func._memoize_dic[key] = result return result else: @my_decorator def _deco(func, *args): # by Michele Simionato # http://www.phyast.pitt.edu/~micheles/python/ try: return func._memoize_dic[args] except AttributeError: # _memoize_dic doesn't exist yet. result = func(*args) func._memoize_dic = {args: result} return result except KeyError: result = func(*args) func._memoize_dic[args] = result return result if not args: return _deco if callable(args[0]) and len(args) == 1: return _deco(args[0]) raise TypeError( "memoize received unexpected position arguments: %s" % args) FunctionValueCache = memoize class _HasKwargs(object): pass def memoize_method(method): """Supports cache deletion via ``method_name.clear_cache(self)``. .. note:: *clear_cache* support requires Python 2.5 or newer. 
""" cache_dict_name = intern("_memoize_dic_"+method.__name__) def wrapper(self, *args, **kwargs): if kwargs: key = (_HasKwargs, frozenset(six.iteritems(kwargs))) + args else: key = args try: return getattr(self, cache_dict_name)[key] except AttributeError: result = method(self, *args, **kwargs) setattr(self, cache_dict_name, {key: result}) return result except KeyError: result = method(self, *args, **kwargs) getattr(self, cache_dict_name)[key] = result return result def clear_cache(self): delattr(self, cache_dict_name) if sys.version_info >= (2, 5): from functools import update_wrapper new_wrapper = update_wrapper(wrapper, method) new_wrapper.clear_cache = clear_cache return new_wrapper def memoize_on_first_arg(function): """Like :func:`memoize_method`, but for functions that take the object to do memoization as first argument. Supports cache deletion via ``function_name.clear_cache(self)``. .. note:: *clear_cache* support requires Python 2.5 or newer. """ cache_dict_name = intern("_memoize_dic_" + function.__module__ + function.__name__) def wrapper(obj, *args, **kwargs): if kwargs: key = (_HasKwargs, frozenset(six.iteritems(kwargs))) + args else: key = args try: return getattr(obj, cache_dict_name)[key] except AttributeError: result = function(obj, *args, **kwargs) setattr(obj, cache_dict_name, {key: result}) return result except KeyError: result = function(obj, *args, **kwargs) getattr(obj, cache_dict_name)[key] = result return result def clear_cache(obj): delattr(obj, cache_dict_name) if sys.version_info >= (2, 5): from functools import update_wrapper new_wrapper = update_wrapper(wrapper, function) new_wrapper.clear_cache = clear_cache return new_wrapper def memoize_method_with_uncached(uncached_args=[], uncached_kwargs=set()): """Supports cache deletion via ``method_name.clear_cache(self)``. :arg uncached_args: a list of argument numbers (0-based, not counting 'self' argument) """ # delete starting from the end uncached_args = sorted(uncached_args, reverse=True) uncached_kwargs = list(uncached_kwargs) def parametrized_decorator(method): cache_dict_name = intern("_memoize_dic_"+method.__name__) def wrapper(self, *args, **kwargs): cache_args = list(args) cache_kwargs = kwargs.copy() for i in uncached_args: if i < len(cache_args): cache_args.pop(i) cache_args = tuple(cache_args) if kwargs: for name in uncached_kwargs: cache_kwargs.pop(name, None) key = ( (_HasKwargs, frozenset(six.iteritems(cache_kwargs))) + cache_args) else: key = cache_args try: return getattr(self, cache_dict_name)[key] except AttributeError: result = method(self, *args, **kwargs) setattr(self, cache_dict_name, {key: result}) return result except KeyError: result = method(self, *args, **kwargs) getattr(self, cache_dict_name)[key] = result return result def clear_cache(self): delattr(self, cache_dict_name) if sys.version_info >= (2, 5): from functools import update_wrapper new_wrapper = update_wrapper(wrapper, method) new_wrapper.clear_cache = clear_cache return new_wrapper return parametrized_decorator def memoize_method_nested(inner): """Adds a cache to a function nested inside a method. The cache is attached to *memoize_cache_context* (if it exists) or *self* in the outer (method) namespace. Requires Python 2.5 or newer. """ from warnings import warn warn("memoize_method_nested is deprecated. 
Use @memoize_in(self, 'identifier') " "instead", DeprecationWarning, stacklevel=2) from functools import wraps cache_dict_name = intern("_memoize_inner_dic_%s_%s_%d" % (inner.__name__, inner.__code__.co_filename, inner.__code__.co_firstlineno)) from inspect import currentframe outer_frame = currentframe().f_back cache_context = outer_frame.f_locals.get("memoize_cache_context") if cache_context is None: cache_context = outer_frame.f_locals.get("self") try: cache_dict = getattr(cache_context, cache_dict_name) except AttributeError: cache_dict = {} setattr(cache_context, cache_dict_name, cache_dict) @wraps(inner) def new_inner(*args): try: return cache_dict[args] except KeyError: result = inner(*args) cache_dict[args] = result return result return new_inner class memoize_in(object): # noqa """Adds a cache to a function nested inside a method. The cache is attached to *memoize_cache_context* (if it exists) or *self* in the outer (method) namespace. Requires Python 2.5 or newer. """ def __init__(self, container, identifier): key = "_pytools_memoize_in_dict_for_"+identifier try: self.cache_dict = getattr(container, key) except AttributeError: self.cache_dict = {} setattr(container, key, self.cache_dict) def __call__(self, inner): from functools import wraps @wraps(inner) def new_inner(*args): try: return self.cache_dict[args] except KeyError: result = inner(*args) self.cache_dict[args] = result return result return new_inner # }}} # {{{ syntactical sugar class InfixOperator: """Pseudo-infix operators that allow syntax of the kind `op1 <> op2'. Following a recipe from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/384122 """ def __init__(self, function): self.function = function def __rlshift__(self, other): return InfixOperator(lambda x: self.function(other, x)) def __rshift__(self, other): return self.function(other) def call(self, a, b): return self.function(a, b) def monkeypatch_method(cls): # from GvR, http://mail.python.org/pipermail/python-dev/2008-January/076194.html def decorator(func): setattr(cls, func.__name__, func) return func return decorator def monkeypatch_class(name, bases, namespace): # from GvR, http://mail.python.org/pipermail/python-dev/2008-January/076194.html assert len(bases) == 1, "Exactly one base class required" base = bases[0] for name, value in six.iteritems(namespace): if name != "__metaclass__": setattr(base, name, value) return base # }}} # {{{ generic utilities def add_tuples(t1, t2): return tuple([t1v + t2v for t1v, t2v in zip(t1, t2)]) def negate_tuple(t1): return tuple([-t1v for t1v in t1]) def shift(vec, dist): """Return a copy of C{vec} shifted by C{dist}. @postcondition: C{shift(a, i)[j] == a[(i+j) % len(a)]} """ result = vec[:] N = len(vec) # noqa dist = dist % N # modulo only returns positive distances! if dist > 0: result[dist:] = vec[:N-dist] result[:dist] = vec[N-dist:] return result def len_iterable(iterable): return sum(1 for i in iterable) def flatten(list): """For an iterable of sub-iterables, generate each member of each sub-iterable in turn, i.e. a flattened version of that super-iterable. Example: Turn [[a,b,c],[d,e,f]] into [a,b,c,d,e,f]. 
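    Note that a generator is returned, so a typical call looks like::

        list(flatten([[1, 2, 3], [4, 5, 6]]))   # -> [1, 2, 3, 4, 5, 6]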
""" for sublist in list: for j in sublist: yield j def general_sum(sequence): return reduce(operator.add, sequence) def linear_combination(coefficients, vectors): result = coefficients[0] * vectors[0] for c, v in zip(coefficients, vectors)[1:]: result += c*v return result def common_prefix(iterable, empty=None): it = iter(iterable) try: pfx = next(it) except StopIteration: return empty for v in it: for j in range(len(pfx)): if pfx[j] != v[j]: pfx = pfx[:j] if j == 0: return pfx break return pfx def decorate(function, list): return [(x, function(x)) for x in list] def partition(criterion, list): part_true = [] part_false = [] for i in list: if criterion(i): part_true.append(i) else: part_false.append(i) return part_true, part_false def partition2(iterable): part_true = [] part_false = [] for pred, i in iterable: if pred: part_true.append(i) else: part_false.append(i) return part_true, part_false def product(iterable): from operator import mul return reduce(mul, iterable, 1) try: all = __builtins__.all any = __builtins__.any except AttributeError: def all(iterable): for i in iterable: if not i: return False return True def any(iterable): for i in iterable: if i: return True return False def reverse_dictionary(the_dict): result = {} for key, value in six.iteritems(the_dict): if value in result: raise RuntimeError( "non-reversible mapping, duplicate key '%s'" % value) result[value] = key return result def set_sum(set_iterable): from operator import or_ return reduce(or_, set_iterable, set()) def div_ceil(nr, dr): return -(-nr // dr) def uniform_interval_splitting(n, granularity, max_intervals): """ Return *(interval_size, num_intervals)* such that:: num_intervals * interval_size >= n and:: (num_intervals - 1) * interval_size < n and *interval_size* is a multiple of *granularity*. """ # ported from Thrust -- minor Apache v2 license violation grains = div_ceil(n, granularity) # one grain per interval if grains <= max_intervals: return granularity, grains grains_per_interval = div_ceil(grains, max_intervals) interval_size = grains_per_interval * granularity num_intervals = div_ceil(n, interval_size) return interval_size, num_intervals def find_max_where(predicate, prec=1e-5, initial_guess=1, fail_bound=1e38): """Find the largest value for which a predicate is true, along a half-line. 0 is assumed to be the lower bound.""" # {{{ establish bracket mag = 1 if predicate(mag): mag *= 2 while predicate(mag): mag *= 2 if mag > fail_bound: raise RuntimeError("predicate appears to be true " "everywhere, up to %g" % fail_bound) lower_true = mag/2 upper_false = mag else: mag /= 2 while not predicate(mag): mag /= 2 if mag < prec: return mag lower_true = mag upper_false = mag*2 # }}} # {{{ refine # Refine a bracket between *lower_true*, where the predicate is true, # and *upper_false*, where it is false, until *prec* is satisfied. 
assert predicate(lower_true) assert not predicate(upper_false) while abs(lower_true-upper_false) > prec: mid = (lower_true+upper_false)/2 if predicate(mid): lower_true = mid else: upper_false = mid else: return lower_true # }}} # }}} # {{{ argmin, argmax def argmin2(iterable, return_value=False): it = iter(iterable) try: current_argmin, current_min = next(it) except StopIteration: raise ValueError("argmin of empty iterable") for arg, item in it: if item < current_min: current_argmin = arg current_min = item if return_value: return current_argmin, current_min else: return current_argmin def argmax2(iterable, return_value=False): it = iter(iterable) try: current_argmax, current_max = next(it) except StopIteration: raise ValueError("argmax of empty iterable") for arg, item in it: if item > current_max: current_argmax = arg current_max = item if return_value: return current_argmax, current_max else: return current_argmax def argmin(iterable): return argmin2(enumerate(iterable)) def argmax(iterable): return argmax2(enumerate(iterable)) # }}} # {{{ cartesian products etc. def cartesian_product(list1, list2): for i in list1: for j in list2: yield (i, j) def distinct_pairs(list1, list2): for i, xi in enumerate(list1): for j, yj in enumerate(list2): if i != j: yield (xi, yj) def cartesian_product_sum(list1, list2): """This routine returns a list of sums of each element of list1 with each element of list2. Also works with lists. """ for i in list1: for j in list2: yield i+j # }}} # {{{ elementary statistics def average(iterable): """Return the average of the values in iterable. iterable may not be empty. """ it = iterable.__iter__() try: sum = next(it) count = 1 except StopIteration: raise ValueError("empty average") for value in it: sum = sum + value count += 1 return sum/count class VarianceAggregator: """Online variance calculator. See http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance Adheres to pysqlite's aggregate interface. 
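    A small sketch of driving the aggregate interface by hand::

        agg = VarianceAggregator(entire_pop=False)   # sample (n-1) variance
        for x in [1, 2, 3, 4]:
            agg.step(x)
        agg.finalize()   # -> approximately 1.6667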
""" def __init__(self, entire_pop): self.n = 0 self.mean = 0 self.m2 = 0 self.entire_pop = entire_pop def step(self, x): self.n += 1 delta = x - self.mean self.mean += delta/self.n self.m2 += delta*(x - self.mean) def finalize(self): if self.entire_pop: if self.n == 0: return None else: return self.m2/self.n else: if self.n <= 1: return None else: return self.m2/(self.n - 1) def variance(iterable, entire_pop): v_comp = VarianceAggregator(entire_pop) for x in iterable: v_comp.step(x) return v_comp.finalize() def std_deviation(iterable, finite_pop): from math import sqrt return sqrt(variance(iterable, finite_pop)) # }}} # {{{ permutations, tuples, integer sequences def wandering_element(length, wanderer=1, landscape=0): for i in range(length): yield i*(landscape,) + (wanderer,) + (length-1-i)*(landscape,) def indices_in_shape(shape): if len(shape) == 0: yield () elif len(shape) == 1: for i in range(0, shape[0]): yield (i,) else: remainder = shape[1:] for i in range(0, shape[0]): for rest in indices_in_shape(remainder): yield (i,)+rest def generate_nonnegative_integer_tuples_below(n, length=None, least=0): """n may be a sequence, in which case length must be None.""" if length is None: if len(n) == 0: yield () return my_n = n[0] n = n[1:] next_length = None else: my_n = n assert length >= 0 if length == 0: yield () return next_length = length-1 for i in range(least, my_n): my_part = (i,) for base in generate_nonnegative_integer_tuples_below(n, next_length, least): yield my_part + base def generate_decreasing_nonnegative_tuples_summing_to(n, length, min=0, max=None): if length == 0: yield () elif length == 1: if n <= max: #print "MX", n, max yield (n,) else: return else: if max is None or n < max: max = n for i in range(min, max+1): #print "SIG", sig, i for remainder in generate_decreasing_nonnegative_tuples_summing_to( n-i, length-1, min, i): yield (i,) + remainder def generate_nonnegative_integer_tuples_summing_to_at_most(n, length): """Enumerate all non-negative integer tuples summing to at most n, exhausting the search space by varying the first entry fastest, and the last entry the slowest. 
""" assert length >= 0 if length == 0: yield () else: for i in range(n+1): for remainder in generate_nonnegative_integer_tuples_summing_to_at_most( n-i, length-1): yield remainder + (i,) def generate_all_nonnegative_integer_tuples(length, least=0): assert length >= 0 current_max = least while True: for max_pos in range(length): for prebase in generate_nonnegative_integer_tuples_below( current_max, max_pos, least): for postbase in generate_nonnegative_integer_tuples_below( current_max+1, length-max_pos-1, least): yield prebase + [current_max] + postbase current_max += 1 # backwards compatibility generate_positive_integer_tuples_below = generate_nonnegative_integer_tuples_below generate_all_positive_integer_tuples = generate_all_nonnegative_integer_tuples def _pos_and_neg_adaptor(tuple_iter): for tup in tuple_iter: nonzero_indices = [i for i in range(len(tup)) if tup[i] != 0] for do_neg_tup in generate_nonnegative_integer_tuples_below( 2, len(nonzero_indices)): this_result = list(tup) for index, do_neg in enumerate(do_neg_tup): if do_neg: this_result[nonzero_indices[index]] *= -1 yield tuple(this_result) def generate_all_integer_tuples_below(n, length, least_abs=0): return _pos_and_neg_adaptor(generate_nonnegative_integer_tuples_below( n, length, least_abs)) def generate_all_integer_tuples(length, least_abs=0): return _pos_and_neg_adaptor(generate_all_nonnegative_integer_tuples( length, least_abs)) def generate_permutations(original): """Generate all permutations of the list `original'. Nicked from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/252178 """ if len(original) <= 1: yield original else: for perm in generate_permutations(original[1:]): for i in range(len(perm)+1): #nb str[0:1] works in both string and list contexts yield perm[:i] + original[0:1] + perm[i:] def generate_unique_permutations(original): """Generate all unique permutations of the list `original'. """ had_those = set() for perm in generate_permutations(original): if perm not in had_those: had_those.add(perm) yield perm def enumerate_basic_directions(dimensions): coordinate_list = [[0], [1], [-1]] return reduce(cartesian_product_sum, [coordinate_list] * dimensions)[1:] # }}} # {{{ index mangling def get_read_from_map_from_permutation(original, permuted): """With a permutation given by C{original} and C{permuted}, generate a list C{rfm} of indices such that C{permuted[i] == original[rfm[i]]}. Requires that the permutation can be inferred from C{original} and C{permuted}. >>> for p1 in generate_permutations(range(5)): ... for p2 in generate_permutations(range(5)): ... rfm = get_read_from_map_from_permutation(p1, p2) ... p2a = [p1[rfm[i]] for i in range(len(p1))] ... assert p2 == p2a """ assert len(original) == len(permuted) where_in_original = dict( (original[i], i) for i in range(len(original))) assert len(where_in_original) == len(original) return tuple(where_in_original[pi] for pi in permuted) def get_write_to_map_from_permutation(original, permuted): """With a permutation given by C{original} and C{permuted}, generate a list C{wtm} of indices such that C{permuted[wtm[i]] == original[i]}. Requires that the permutation can be inferred from C{original} and C{permuted}. >>> for p1 in generate_permutations(range(5)): ... for p2 in generate_permutations(range(5)): ... wtm = get_write_to_map_from_permutation(p1, p2) ... p2a = [0] * len(p2) ... for i, oi in enumerate(p1): ... p2a[wtm[i]] = oi ... 
assert p2 == p2a """ assert len(original) == len(permuted) where_in_permuted = dict( (permuted[i], i) for i in range(len(permuted))) assert len(where_in_permuted) == len(permuted) return tuple(where_in_permuted[oi] for oi in original) # }}} # {{{ graph algorithms def a_star(initial_state, goal_state, neighbor_map, estimate_remaining_cost=None, get_step_cost=lambda x, y: 1): """ With the default cost and heuristic, this amounts to Dijkstra's algorithm. """ from heapq import heappop, heappush if estimate_remaining_cost is None: def estimate_remaining_cost(x): if x != goal_state: return 1 else: return 0 class AStarNode(object): __slots__ = ["state", "parent", "path_cost"] def __init__(self, state, parent, path_cost): self.state = state self.parent = parent self.path_cost = path_cost inf = float("inf") init_remcost = estimate_remaining_cost(initial_state) assert init_remcost != inf queue = [(init_remcost, AStarNode(initial_state, parent=None, path_cost=0))] visited_states = set() while len(queue): _, top = heappop(queue) visited_states.add(top.state) if top.state == goal_state: result = [] it = top while it is not None: result.append(it.state) it = it.parent return result[::-1] for state in neighbor_map[top.state]: if state in visited_states: continue remaining_cost = estimate_remaining_cost(state) if remaining_cost == inf: continue step_cost = get_step_cost(top, state) estimated_path_cost = top.path_cost+step_cost+remaining_cost heappush(queue, (estimated_path_cost, AStarNode(state, top, path_cost=top.path_cost + step_cost))) raise RuntimeError("no solution") # }}} # {{{ formatting # {{{ table formatting class Table: """An ASCII table generator.""" def __init__(self): self.rows = [] def add_row(self, row): self.rows.append([str(i) for i in row]) def __str__(self): columns = len(self.rows[0]) col_widths = [max(len(row[i]) for row in self.rows) for i in range(columns)] lines = [ "|".join([cell.ljust(col_width) for cell, col_width in zip(row, col_widths)]) for row in self.rows] lines[1:1] = ["+".join("-"*col_width for col_width in col_widths)] return "\n".join(lines) def latex(self, skip_lines=0, hline_after=[]): lines = [] for row_nr, row in list(enumerate(self.rows))[skip_lines:]: lines.append(" & ".join(row)+r" \\") if row_nr in hline_after: lines.append(r"\hline") return "\n".join(lines) # }}} # {{{ histogram formatting def string_histogram(iterable, min_value=None, max_value=None, bin_count=20, width=70, bin_starts=None, use_unicode=True): if bin_starts is None: if min_value is None or max_value is None: iterable = list(iterable) min_value = min(iterable) max_value = max(iterable) bin_width = (max_value - min_value)/bin_count bin_starts = [min_value+bin_width*i for i in range(bin_count)] bins = [0 for i in range(len(bin_starts))] from bisect import bisect for value in iterable: if max_value is not None and value > max_value or value < bin_starts[0]: from warnings import warn warn("string_histogram: out-of-bounds value ignored") else: bin_nr = bisect(bin_starts, value)-1 try: bins[bin_nr] += 1 except: print(value, bin_nr, bin_starts) raise from math import floor, ceil if use_unicode: def format_bar(cnt): scaled = cnt*width/max_count full = int(floor(scaled)) eighths = int(ceil((scaled-full)*8)) if eighths: return full*unichr(0x2588) + unichr(0x2588+(8-eighths)) else: return full*unichr(0x2588) else: def format_bar(cnt): return int(ceil(cnt*width/max_count))*"#" max_count = max(bins) total_count = sum(bins) return "\n".join("%9g |%9d | %3.0f %% | %s" % ( bin_start, bin_value, 
bin_value/total_count*100, format_bar(bin_value)) for bin_start, bin_value in zip(bin_starts, bins)) # }}} def word_wrap(text, width, wrap_using="\n"): # http://code.activestate.com/recipes/148061-one-liner-word-wrap-function/ """ A word-wrap function that preserves existing line breaks and most spaces in the text. Expects that existing line breaks are posix newlines (\n). """ space_or_break = [" ", wrap_using] return reduce(lambda line, word, width=width: '%s%s%s' % (line, space_or_break[(len(line)-line.rfind('\n')-1 + len(word.split('\n', 1)[0]) >= width)], word), text.split(' ') ) # }}} # {{{ command line interfaces ------------------------------------------------- def _exec_arg(arg, execenv): import os if os.access(arg, os.F_OK): exec(compile(open(arg, "r"), arg, 'exec'), execenv) else: exec(compile(arg, "", 'exec'), execenv) class CPyUserInterface(object): class Parameters(Record): pass def __init__(self, variables, constants={}, doc={}): self.variables = variables self.constants = constants self.doc = doc def show_usage(self, progname): print("usage: %s " % progname) print() print("FILE-OR-STATEMENTS may either be Python statements of the form") print("'variable1 = value1; variable2 = value2' or the name of a file") print("containing such statements. Any valid Python code may be used") print("on the command line or in a command file. If new variables are") print("used, they must start with 'user_' or just '_'.") print() print("The following variables are recognized:") for v in sorted(self.variables): print(" %s = %s" % (v, self.variables[v])) if v in self.doc: print(" %s" % self.doc[v]) print() print("The following constants are supplied:") for c in sorted(self.constants): print(" %s = %s" % (c, self.constants[c])) if c in self.doc: print(" %s" % self.doc[c]) def gather(self, argv=None): import sys if argv is None: argv = sys.argv if len(argv) == 1 or ( ("-h" in argv) or ("help" in argv) or ("-help" in argv) or ("--help" in argv)): self.show_usage(argv[0]) sys.exit(2) execenv = self.variables.copy() execenv.update(self.constants) for arg in argv[1:]: _exec_arg(arg, execenv) # check if the user set invalid keys for added_key in ( set(execenv.keys()) - set(self.variables.keys()) - set(self.constants.keys())): if not (added_key.startswith("user_") or added_key.startswith("_")): raise ValueError( "invalid setup key: '%s' " "(user variables must start with 'user_' or '_')" % added_key) result = self.Parameters(dict((key, execenv[key]) for key in self.variables)) self.validate(result) return result def validate(self, setup): pass # }}} # {{{ code maintenance class MovedFunctionDeprecationWrapper: def __init__(self, f): self.f = f def __call__(self, *args, **kwargs): from warnings import warn warn("This function is deprecated. Use %s.%s instead." 
% ( self.f.__module__, self.f.__name__), DeprecationWarning, stacklevel=2) return self.f(*args, **kwargs) # }}} # {{{ debugging class StderrToStdout(object): def __enter__(self): import sys self.stderr_backup = sys.stderr sys.stderr = sys.stdout def __exit__(self, exc_type, exc_val, exc_tb): import sys sys.stderr = self.stderr_backup del self.stderr_backup def typedump(val, max_seq=5, special_handlers={}): try: hdlr = special_handlers[type(val)] except KeyError: pass else: return hdlr(val) try: len(val) except TypeError: return type(val).__name__ else: if isinstance(val, dict): return "{%s}" % ( ", ".join( "%r: %s" % (str(k), typedump(v)) for k, v in six.iteritems(val))) try: if len(val) > max_seq: return "%s(%s,...)" % ( type(val).__name__, ",".join(typedump(x, max_seq, special_handlers) for x in val[:max_seq])) else: return "%s(%s)" % ( type(val).__name__, ",".join(typedump(x, max_seq, special_handlers) for x in val)) except TypeError: return val.__class__.__name__ def invoke_editor(s, filename="edit.txt", descr="the file"): from tempfile import mkdtemp tempdir = mkdtemp() from os.path import join full_name = join(tempdir, filename) outf = open(full_name, "w") outf.write(str(s)) outf.close() import os if "EDITOR" in os.environ: from subprocess import Popen p = Popen([os.environ["EDITOR"], full_name]) os.waitpid(p.pid, 0)[1] else: print("(Set the EDITOR environment variable to be " "dropped directly into an editor next time.)") input("Edit %s at %s now, then hit [Enter]:" % (descr, full_name)) inf = open(full_name, "r") result = inf.read() inf.close() return result # }}} # {{{ progress bars class ProgressBar: def __init__(self, descr, total, initial=0, length=40): import time self.description = descr self.total = total self.done = initial self.length = length self.last_squares = -1 self.start_time = time.time() self.last_update_time = self.start_time self.speed_meas_start_time = self.start_time self.speed_meas_start_done = initial self.time_per_step = None def draw(self): import time now = time.time() squares = int(self.done/self.total*self.length) if squares != self.last_squares or now-self.last_update_time > 0.5: if (self.done != self.speed_meas_start_done and now-self.speed_meas_start_time > 3): new_time_per_step = (now-self.speed_meas_start_time) \ / (self.done-self.speed_meas_start_done) if self.time_per_step is not None: self.time_per_step = (new_time_per_step + self.time_per_step)/2 else: self.time_per_step = new_time_per_step self.speed_meas_start_time = now self.speed_meas_start_done = self.done if self.time_per_step is not None: eta_str = "%7.1fs " % max( 0, (self.total-self.done) * self.time_per_step) else: eta_str = "?" 
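    # Redraw in place: the trailing carriage return below keeps the bar on a
    # single terminal line between updates.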
import sys sys.stderr.write("%-20s [%s] ETA %s\r" % ( self.description, squares*"#"+(self.length-squares)*" ", eta_str)) self.last_squares = squares self.last_update_time = now def progress(self, steps=1): self.set_progress(self.done + steps) def set_progress(self, done): self.done = done self.draw() def finished(self): import sys self.set_progress(self.total) sys.stderr.write("\n") def __enter__(self): self.draw() def __exit__(self, exc_type, exc_val, exc_tb): self.finished() # }}} # {{{ file system related def assert_not_a_file(name): import os if os.access(name, os.F_OK): raise IOError("file `%s' already exists" % name) def add_python_path_relative_to_script(rel_path): import sys from os.path import dirname, join, abspath script_name = sys.argv[0] rel_script_dir = dirname(script_name) sys.path.append(abspath(join(rel_script_dir, rel_path))) # }}} # {{{ numpy dtype mangling def common_dtype(dtypes, default=None): dtypes = list(dtypes) if dtypes: return argmax2((dtype, dtype.num) for dtype in dtypes) else: if default is not None: return default else: raise ValueError( "cannot find common dtype of empty dtype list") def to_uncomplex_dtype(dtype): import numpy if dtype == numpy.complex64: return numpy.float32 elif dtype == numpy.complex128: return numpy.float64 if dtype == numpy.float32: return numpy.float32 elif dtype == numpy.float64: return numpy.float64 else: raise TypeError("unrecgonized dtype '%s'" % dtype) def match_precision(dtype, dtype_to_match): import numpy tgt_is_double = dtype_to_match in [ numpy.float64, numpy.complex128] dtype_is_complex = dtype.kind == "c" if dtype_is_complex: if tgt_is_double: return numpy.dtype(numpy.complex128) else: return numpy.dtype(numpy.complex64) else: if tgt_is_double: return numpy.dtype(numpy.float64) else: return numpy.dtype(numpy.float32) # }}} # {{{ unique name generation def generate_unique_names(prefix): yield prefix try_num = 0 while True: yield "%s_%d" % (prefix, try_num) try_num += 1 generate_unique_possibilities = MovedFunctionDeprecationWrapper( generate_unique_names) class UniqueNameGenerator: def __init__(self, existing_names=set(), forced_prefix=""): self.existing_names = existing_names.copy() self.forced_prefix = forced_prefix def is_name_conflicting(self, name): return name in self.existing_names def add_name(self, name): if self.is_name_conflicting(name): raise ValueError("name '%s' conflicts with existing names") if not name.startswith(self.forced_prefix): raise ValueError("name '%s' does not start with required prefix") self.existing_names.add(name) def add_names(self, names): for name in names: self.add_name(name) def __call__(self, based_on="id"): based_on = self.forced_prefix + based_on for var_name in generate_unique_names(based_on): if not self.is_name_conflicting(var_name): break var_name = intern(var_name) self.existing_names.add(var_name) return var_name # }}} def _test(): import doctest doctest.testmod() if __name__ == "__main__": _test() # vim: foldmethod=marker pytools-2015.1.6/pytools/arithmetic_container.py0000644000175000017500000003612412610520312023007 0ustar andreasandreas00000000000000from __future__ import division from __future__ import absolute_import from .decorator import decorator import operator import six from six.moves import range from six.moves import zip class ArithmeticList(list): """A list with elementwise arithmetic operations.""" def assert_same_length(self, other): assert len(self) == len(other) def unary_operator(self, operator): return ArithmeticList(operator(v) for v in self) def 
binary_operator(self, other, operator): if not isinstance(other, ArithmeticList): return ArithmeticList(operator(v, other) for v in self) self.assert_same_length(other) return ArithmeticList(operator(v, w) for v, w in zip(self, other)) def reverse_binary_operator(self, other, operator): if not isinstance(other, ArithmeticList): return ArithmeticList(operator(other, v) for v in self) self.assert_same_length(other) return ArithmeticList(operator(w, v) for v, w in zip(self, other)) def __neg__(self): return self.unary_operator(operator.neg) def __pos__(self): return self.unary_operator(operator.pos) def __abs__(self): return self.unary_operator(operator.abs) def __invert__(self): return self.unary_operator(operator.invert) def __add__(self, other): return self.binary_operator(other, operator.add) def __sub__(self, other): return self.binary_operator(other, operator.sub) def __mul__(self, other): return self.binary_operator(other, operator.mul) def __div__(self, other): return self.binary_operator(other, operator.div) def __truediv__(self, other): return self.binary_operator(other, operator.truediv) def __floordiv__(self, other): return self.binary_operator(other, operator.floordiv) def __mod__(self, other): return self.binary_operator(other, operator.mod) def __pow__(self, other): return self.binary_operator(other, operator.pow) def __lshift__(self, other): return self.binary_operator(other, operator.lshift) def __rshift__(self, other): return self.binary_operator(other, operator.rshift) def __and__(self, other): return self.binary_operator(other, operator.and_) def __or__(self, other): return self.binary_operator(other, operator.or_) def __xor__(self, other): return self.binary_operator(other, operator.xor) def __radd__(self, other): return self.reverse_binary_operator(other, operator.add) def __rsub__(self, other): return self.reverse_binary_operator(other, operator.sub) def __rmul__(self, other): return self.reverse_binary_operator(other, operator.mul) def __rdiv__(self, other): return self.reverse_binary_operator(other, operator.div) def __rtruediv__(self, other): return self.reverse_binary_operator(other, operator.truediv) def __rfloordiv__(self, other): return self.reverse_binary_operator(other, operator.floordiv) def __rmod__(self, other): return self.reverse_binary_operator(other, operator.mod) def __rpow__(self, other): return self.reverse_binary_operator(other, operator.pow) def __rlshift__(self, other): return self.reverse_binary_operator(other, operator.lshift) def __rrshift__(self, other): return self.reverse_binary_operator(other, operator.rshift) def __rand__(self, other): return self.reverse_binary_operator(other, operator.and_) def __ror__(self, other): return self.reverse_binary_operator(other, operator.or_) def __rxor__(self, other): return self.reverse_binary_operator(other, operator.xor) def __iadd__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] += other[i] return self def __isub__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] -= other[i] return self def __imul__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] *= other[i] return self def __idiv__(self, other): from operator import div self.assert_same_length(other) for i in range(len(self)): self[i] = div(self[i], other[i]) return self def __itruediv__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] /= other[i] return self def __ifloordiv__(self, other): self.assert_same_length(other) for i 
in range(len(self)): self[i] //= other[i] return self def __imod__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] %= other[i] return self def __ipow__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] **= other[i] return self def __ilshift__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] <<= other[i] return self def __irshift__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] >>= other[i] return self def __iand__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] &= other[i] return self def __ior__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] |= other[i] return self def __ixor__(self, other): self.assert_same_length(other) for i in range(len(self)): self[i] ^= other[i] return self def __getslice__(self, i, j): return ArithmeticList(list.__getslice__(self, i, j)) def __str__(self): return "ArithmeticList(%s)" % list.__repr__(self) def __repr__(self): return "ArithmeticList(%s)" % list.__repr__(self) def plus(self, other): """Return a copy of self extended by the entries from the iterable C{other}. Makes up for the loss of the C{+} operator (which is now arithmetic). """ result = ArithmeticList(self) result.extend(other) return result def join_fields(*fields): result = ArithmeticList() for f in fields: if isinstance(f, (ArithmeticList, list)): result.extend(f) else: result.append(f) return result @decorator def work_with_arithmetic_containers(f, *args, **kwargs): """This decorator allows simple elementwise functions to automatically accept containers of arithmetic types, by acting on each element. At present, it only works for ArithmeticList. """ class SimpleArg: def __init__(self, arg_number): self.arg_number = arg_number def eval(self, current_tp): return args[self.arg_number] class SimpleKwArg: def __init__(self, arg_name): self.arg_name = arg_name def eval(self, current_tp): return kwargs[self.arg_name] class ListArg: def __init__(self, list_number): self.list_number = list_number def eval(self, current_tp): return current_tp[self.list_number] lists = [] formal_args = [] formal_kwargs = {} for arg in args: if isinstance(arg, ArithmeticList): formal_args.append(ListArg(len(lists))) lists.append(arg) else: formal_args.append(SimpleArg(len(formal_args))) for name, arg in six.iteritems(kwargs): if isinstance(arg, ArithmeticList): formal_kwargs[name] = ListArg(len(lists)) lists.append(arg) else: formal_kwargs[name] = SimpleKwArg(name) if lists: from pytools import all_equal assert all_equal(len(lst) for lst in lists) return ArithmeticList( f( *list(formal_arg.eval(tp) for formal_arg in formal_args), **dict((name, formal_arg.eval(tp)) for name, formal_arg in six.iteritems(formal_kwargs)) ) for tp in zip(*lists)) else: return f(*args, **kwargs) def outer_product(al1, al2, mult_op=operator.mul): return ArithmeticListMatrix( [[mult_op(al1i, al2i) for al2i in al2] for al1i in al1] ) class ArithmeticListMatrix: """A matrix type that operates on L{ArithmeticLists}.""" def __init__(self, matrix): """Initialize the ArithmeticListMatrix. C{matrix} must allow the following interface: - len(matrix) gives the height of the matrix. - matrix is iterable, giving the rows of the matrix. Each row, in turn, must support C{len()} and iteration. 
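    A plain nested list of numbers satisfies this interface. A small
    illustrative sketch::

        m = ArithmeticListMatrix([[1, 0], [0, 2]])
        m * ArithmeticList([3, 4])   # -> ArithmeticList([3, 8])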
""" self.matrix = matrix def times(self, other, mult_op): if not isinstance(other, ArithmeticList): raise NotImplementedError result = ArithmeticList(None for i in range(len(self.matrix))) for i, row in enumerate(self.matrix): if len(row) != len(other): raise ValueError("matrix width does not match ArithmeticList") for j, entry in enumerate(row): if not isinstance(entry, (int, float)) or entry: if not isinstance(entry, (int, float)) or entry != 1: contrib = mult_op(entry, other[j]) else: contrib = other[j] if result[i] is None: result[i] = contrib else: result[i] += contrib for i in range(len(result)): if result[i] is None and len(other): result[i] = 0 * other[0] return result def __mul__(self, other): if not isinstance(other, ArithmeticList): return NotImplemented from operator import mul return self.times(other, mul) def map(self, entry_map): return ArithmeticListMatrix([[ entry_map(entry) for j, entry in enumerate(row)] for i, row in enumerate(self.matrix)]) class ArithmeticDictionary(dict): """A dictionary with elementwise (on the values, not the keys) arithmetic operations.""" def _get_empty_self(self): return ArithmeticDictionary() def assert_same_keys(self, other): for key in self: assert key in other for key in other: assert key in self def unary_operator(self, operator): result = self._get_empty_self() for key in self: result[key] = operator(self[key]) return result def binary_operator(self, other, operator): try: self.assert_same_keys(other) result = self._get_empty_self() for key in self: result[key] = operator(self[key], other[key]) return result except TypeError: result = self._get_empty_self() for key in self: result[key] = operator(self[key], other) return result def reverse_binary_operator(self, other, operator): try: self.assert_same_keys(other) result = self._get_empty_self() for key in self: result[key] = operator(other[key], self[key]) return result except TypeError: result = self._get_empty_self() for key in self: result[key] = operator(other, self[key]) return result def __neg__(self): return self.unary_operator(operator.neg) def __pos__(self): return self.unary_operator(operator.pos) def __abs__(self): return self.unary_operator(operator.abs) def __invert__(self): return self.unary_operator(operator.invert) def __add__(self, other): return self.binary_operator(other, operator.add) def __sub__(self, other): return self.binary_operator(other, operator.sub) def __mul__(self, other): return self.binary_operator(other, operator.mul) def __div__(self, other): return self.binary_operator(other, operator.div) def __mod__(self, other): return self.binary_operator(other, operator.mod) def __pow__(self, other): return self.binary_operator(other, operator.pow) def __lshift__(self, other): return self.binary_operator(other, operator.lshift) def __rshift__(self, other): return self.binary_operator(other, operator.rshift) def __and__(self, other): return self.binary_operator(other, operator.and_) def __or__(self, other): return self.binary_operator(other, operator.or_) def __xor__(self, other): return self.binary_operator(other, operator.xor) def __radd__(self, other): return self.reverse_binary_operator(other, operator.add) def __rsub__(self, other): return self.reverse_binary_operator(other, operator.sub) def __rmul__(self, other): return self.reverse_binary_operator(other, operator.mul) def __rdiv__(self, other): return self.reverse_binary_operator(other, operator.div) def __rmod__(self, other): return self.reverse_binary_operator(other, operator.mod) def __rpow__(self, other): 
return self.reverse_binary_operator(other, operator.pow) def __rlshift__(self, other): return self.reverse_binary_operator(other, operator.lshift) def __rrshift__(self, other): return self.reverse_binary_operator(other, operator.rshift) def __rand__(self, other): return self.reverse_binary_operator(other, operator.and_) def __ror__(self, other): return self.reverse_binary_operator(other, operator.or_) def __rxor__(self, other): return self.reverse_binary_operator(other, operator.xor) def __iadd__(self, other): self.assert_same_keys(other) for key in self: self[key] += other[key] return self def __isub__(self, other): self.assert_same_keys(other) for key in self: self[key] -= other[key] return self def __imul__(self, other): self.assert_same_keys(other) for key in self: self[key] *= other[key] return self def __idiv__(self, other): self.assert_same_keys(other) for key in self: self[key] /= other[key] return self def __imod__(self, other): self.assert_same_keys(other) for key in self: self[key] %= other[key] return self def __ipow__(self, other): self.assert_same_keys(other) for key in self: self[key] **= other[key] return self def __ilshift__(self, other): self.assert_same_keys(other) for key in self: self[key] <<= other[key] return self def __irshift__(self, other): self.assert_same_keys(other) for key in self: self[key] >>= other[key] return self def __iand__(self, other): self.assert_same_keys(other) for key in self: self[key] &= other[key] return self def __ior__(self, other): self.assert_same_keys(other) for key in self: self[key] |= other[key] return self def __ixor__(self, other): self.assert_same_keys(other) for key in self: self[key] ^= other[key] return self pytools-2015.1.6/pytools/batchjob.py0000644000175000017500000001014512610520312020363 0ustar andreasandreas00000000000000from __future__ import absolute_import import six def _cp(src, dest): from pytools import assert_not_a_file assert_not_a_file(dest) inf = open(src, "rb") try: outf = open(dest, "wb") try: outf.write(inf.read()) finally: outf.close() finally: inf.close() def get_timestamp(): from datetime import datetime return datetime.now().strftime("%Y-%m-%d-%H%M%S") class BatchJob(object): def __init__(self, moniker, main_file, aux_files=[], timestamp=None): import os import os.path if timestamp is None: timestamp = get_timestamp() self.moniker = ( moniker .replace("/", "-") .replace("-$DATE", "") .replace("$DATE-", "") .replace("$DATE", "") ) self.subdir = moniker.replace("$DATE", timestamp) self.path = os.path.join( os.getcwd(), self.subdir) os.makedirs(self.path) runscript = open("%s/run.sh" % self.path, "w") import sys runscript.write("%s %s setup.cpy" % (sys.executable, main_file)) runscript.close() from os.path import basename if not main_file.startswith("-m "): _cp(main_file, os.path.join(self.path, basename(main_file))) for aux_file in aux_files: _cp(aux_file, os.path.join(self.path, basename(aux_file))) def write_setup(self, lines): import os.path setup = open(os.path.join(self.path, "setup.cpy"), "w") setup.write("\n".join(lines)) setup.close() class INHERIT(object): pass class GridEngineJob(BatchJob): def submit(self, env={"LD_LIBRARY_PATH": INHERIT, "PYTHONPATH": INHERIT}, memory_megs=None, extra_args=[]): from subprocess import Popen args = [ "-N", self.moniker, "-cwd", ] from os import getenv for var, value in six.iteritems(env): if value is INHERIT: value = getenv(var) args += ["-v", "%s=%s" % (var, value)] if memory_megs is not None: args.extend(["-l", "mem=%d" % memory_megs]) args.extend(extra_args) 
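    # Submit run.sh through Grid Engine's qsub, running from the job's
    # directory so that the script and setup.cpy are found there.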
subproc = Popen(["qsub"] + args + ["run.sh"], cwd=self.path) if subproc.wait() != 0: raise RuntimeError("Process submission of %s failed" % self.moniker) class PBSJob(BatchJob): def submit(self, env={"LD_LIBRARY_PATH": INHERIT, "PYTHONPATH": INHERIT}, memory_megs=None, extra_args=[]): from subprocess import Popen args = [ "-N", self.moniker, "-d", self.path, ] if memory_megs is not None: args.extend(["-l", "pmem=%dmb" % memory_megs]) from os import getenv for var, value in six.iteritems(env): if value is INHERIT: value = getenv(var) args += ["-v", "%s=%s" % (var, value)] args.extend(extra_args) subproc = Popen(["qsub"] + args + ["run.sh"], cwd=self.path) if subproc.wait() != 0: raise RuntimeError("Process submission of %s failed" % self.moniker) def guess_job_class(): from subprocess import Popen, PIPE, STDOUT qstat_helplines = Popen(["qstat", "--help"], stdout=PIPE, stderr=STDOUT).communicate()[0].split("\n") if qstat_helplines[0].startswith("GE"): return GridEngineJob else: return PBSJob class ConstructorPlaceholder: def __init__(self, classname, *args, **kwargs): self.classname = classname self.args = args self.kwargs = kwargs def arg(self, i): return self.args[i] def kwarg(self, name): return self.kwargs[name] def __str__(self): return "%s(%s)" % (self.classname, ",".join( [str(arg) for arg in self.args] + ["%s=%s" % (kw, repr(val)) for kw, val in six.iteritems(self.kwargs)] ) ) __repr__ = __str__ pytools-2015.1.6/pytools/convergence.py0000644000175000017500000000722112613226301021112 0ustar andreasandreas00000000000000from __future__ import absolute_import import numpy as np from six.moves import range from six.moves import zip # {{{ eoc estimation -------------------------------------------------------------- def estimate_order_of_convergence(abscissae, errors): """Assuming that abscissae and errors are connected by a law of the form error = constant * abscissa ^ (order), this function finds, in a least-squares sense, the best approximation of constant and order for the given data set. It returns a tuple (constant, order). 
""" assert len(abscissae) == len(errors) if len(abscissae) <= 1: raise RuntimeError("Need more than one value to guess order of convergence.") coefficients = np.polyfit(np.log10(abscissae), np.log10(errors), 1) return 10**coefficients[-1], coefficients[-2] class EOCRecorder(object): def __init__(self): self.history = [] def add_data_point(self, abscissa, error): self.history.append((abscissa, error)) def estimate_order_of_convergence(self, gliding_mean=None): abscissae = np.array([a for a, e in self.history]) errors = np.array([e for a, e in self.history]) size = len(abscissae) if gliding_mean is None: gliding_mean = size data_points = size - gliding_mean + 1 result = np.zeros((data_points, 2), float) for i in range(data_points): result[i, 0], result[i, 1] = estimate_order_of_convergence( abscissae[i:i+gliding_mean], errors[i:i+gliding_mean]) return result def order_estimate(self): return self.estimate_order_of_convergence()[0, 1] def max_error(self): return max(err for absc, err in self.history) def pretty_print(self, abscissa_label="h", error_label="Error", gliding_mean=2): from pytools import Table tbl = Table() tbl.add_row((abscissa_label, error_label, "Running EOC")) gm_eoc = self.estimate_order_of_convergence(gliding_mean) for i, (absc, err) in enumerate(self.history): if i < gliding_mean-1: tbl.add_row((str(absc), str(err), "")) else: tbl.add_row((str(absc), str(err), str(gm_eoc[i-gliding_mean+1, 1]))) if len(self.history) > 1: return str(tbl) + "\n\nOverall EOC: %s" \ % self.estimate_order_of_convergence()[0, 1] else: return str(tbl) def __str__(self): return self.pretty_print() def write_gnuplot_file(self, filename): outfile = open(filename, "w") for absc, err in self.history: outfile.write("%f %f\n" % (absc, err)) result = self.estimate_order_of_convergence() const = result[0, 0] order = result[0, 1] outfile.write("\n") for absc, err in self.history: outfile.write("%f %f\n" % (absc, const * absc**(-order))) # }}} # {{{ p convergence verifier class PConvergenceVerifier(object): def __init__(self): self.orders = [] self.errors = [] def add_data_point(self, order, error): self.orders.append(order) self.errors.append(error) def __str__(self): from pytools import Table tbl = Table() tbl.add_row(("p", "error")) for p, err in zip(self.orders, self.errors): tbl.add_row((str(p), str(err))) return str(tbl) def __call__(self): orders = np.array(self.orders, np.float64) errors = np.abs(np.array(self.errors, np.float64)) rel_change = np.diff(1e-20 + np.log10(errors)) / np.diff(orders) assert (rel_change < -0.2).all() # }}} # vim: foldmethod=marker pytools-2015.1.6/pytools/datatable.py0000644000175000017500000002112212610520312020525 0ustar andreasandreas00000000000000from __future__ import absolute_import from pytools import Record import six from six.moves import range from six.moves import zip class Row(Record): pass class DataTable: """An in-memory relational database table.""" def __init__(self, column_names, column_data=None): """Construct a new table, with the given C{column_names}. @arg column_names: An indexable of column name strings. @arg column_data: None or a list of tuples of the same length as C{column_names} indicating an initial set of data. 
""" if column_data is None: self.data = [] else: self.data = column_data self.column_names = column_names self.column_indices = dict( (colname, i) for i, colname in enumerate(column_names)) if len(self.column_indices) != len(self.column_names): raise RuntimeError("non-unique column names encountered") def __bool__(self): return bool(self.data) def __len__(self): return len(self.data) def __iter__(self): return self.data.__iter__() def __str__(self): """Return a pretty-printed version of the table.""" def col_width(i): width = len(self.column_names[i]) if self: width = max(width, max(len(str(row[i])) for row in self.data)) return width col_widths = [col_width(i) for i in range(len(self.column_names))] def format_row(row): return "|".join([str(cell).ljust(col_width) for cell, col_width in zip(row, col_widths)]) lines = [format_row(self.column_names), "+".join("-"*col_width for col_width in col_widths)] + \ [format_row(row) for row in self.data] return "\n".join(lines) def insert(self, **kwargs): values = [None for i in range(len(self.column_names))] for key, val in six.iteritems(kwargs): values[self.column_indices[key]] = val self.insert_row(tuple(values)) def insert_row(self, values): assert isinstance(values, tuple) assert len(values) == len(self.column_names) self.data.append(values) def insert_rows(self, rows): for row in rows: self.insert_row(row) def filtered(self, **kwargs): if not kwargs: return self criteria = tuple( (self.column_indices[key], value) for key, value in six.iteritems(kwargs)) result_data = [] for row in self.data: satisfied = True for idx, val in criteria: if row[idx] != val: satisfied = False break if satisfied: result_data.append(row) return DataTable(self.column_names, result_data) def get(self, **kwargs): filtered = self.filtered(**kwargs) if len(filtered) < 1: raise RuntimeError("no matching entry for get()") if len(filtered) > 1: raise RuntimeError("more than one matching entry for get()") return Row(dict(list(zip(self.column_names, filtered.data[0])))) def clear(self): del self.data[:] def copy(self): """Make a copy of the instance, but leave individual rows untouched. If the rows are modified later, they will also be modified in the copy. """ return DataTable(self.column_names, self.data[:]) def deep_copy(self): """Make a copy of the instance down to the row level. The copy's rows may be modified independently from the original. """ return DataTable(self.column_names, [row[:] for row in self.data]) def sort(self, columns, reverse=False): col_indices = [self.column_indices[col] for col in columns] def mykey(row): return tuple( row[col_index] for col_index in col_indices) self.data.sort(reverse=reverse, key=mykey) def aggregated(self, groupby, agg_column, aggregate_func): gb_indices = [self.column_indices[col] for col in groupby] agg_index = self.column_indices[agg_column] first = True result_data = [] # to pacify pyflakes: last_values = None agg_values = None for row in self.data: this_values = tuple(row[i] for i in gb_indices) if first or this_values != last_values: if not first: result_data.append(last_values + (aggregate_func(agg_values),)) agg_values = [row[agg_index]] last_values = this_values first = False else: agg_values.append(row[agg_index]) if not first and agg_values: result_data.append(this_values + (aggregate_func(agg_values),)) return DataTable( [self.column_names[i] for i in gb_indices] + [agg_column], result_data) def join(self, column, other_column, other_table, outer=False): """Return a tabled joining this and the C{other_table} on C{column}. 
The new table has the following columns: - C{column}, titled the same as in this table. - the columns of this table, minus C{column}. - the columns of C{other_table}, minus C{other_column}. Assumes both tables are sorted ascendingly by the column by which they are joined. """ def without(indexable, idx): return indexable[:idx] + indexable[idx+1:] this_key_idx = self.column_indices[column] other_key_idx = other_table.column_indices[other_column] this_iter = self.data.__iter__() other_iter = other_table.data.__iter__() result_columns = [self.column_names[this_key_idx]] + \ without(self.column_names, this_key_idx) + \ without(other_table.column_names, other_key_idx) result_data = [] this_row = next(this_iter) other_row = next(other_iter) this_over = False other_over = False while True: this_batch = [] other_batch = [] if this_over: run_other = True elif other_over: run_this = True else: this_key = this_row[this_key_idx] other_key = other_row[other_key_idx] run_this = this_key < other_key run_other = this_key > other_key if this_key == other_key: run_this = run_other = True if run_this and not this_over: key = this_key while this_row[this_key_idx] == this_key: this_batch.append(this_row) try: this_row = next(this_iter) except StopIteration: this_over = True break else: if outer: this_batch = [(None,) * len(self.column_names)] if run_other and not other_over: key = other_key while other_row[other_key_idx] == other_key: other_batch.append(other_row) try: other_row = next(other_iter) except StopIteration: other_over = True break else: if outer: other_batch = [(None,) * len(other_table.column_names)] for this_batch_row in this_batch: for other_batch_row in other_batch: result_data.append((key,) + without(this_batch_row, this_key_idx) + without(other_batch_row, other_key_idx)) if outer: if this_over and other_over: break else: if this_over or other_over: break return DataTable(result_columns, result_data) def restricted(self, columns): col_indices = [self.column_indices[col] for col in columns] return DataTable(columns, [[row[i] for i in col_indices] for row in self.data]) def column_data(self, column): col_index = self.column_indices[column] return [row[col_index] for row in self.data] def write_csv(self, filelike, **kwargs): from csv import writer csvwriter = writer(filelike, **kwargs) csvwriter.writerow(self.column_names) csvwriter.writerows(self.data) pytools-2015.1.6/pytools/debug.py0000644000175000017500000001333512610520312017701 0ustar andreasandreas00000000000000from __future__ import absolute_import from __future__ import print_function from pytools import memoize import six from six.moves import input # {{{ debug files ------------------------------------------------------------- def make_unique_filesystem_object(stem, extension="", directory="", creator=None): """ :param extension: needs a leading dot. :param directory: must not have a trailing slash. """ from os.path import join import os if creator is None: def creator(name): return os.fdopen(os.open(name, os.O_CREAT | os.O_WRONLY | os.O_EXCL, 0o444), "w") i = 0 while True: fname = join(directory, "%s-%d%s" % (stem, i, extension)) try: return creator(fname), fname except OSError: i += 1 @memoize def get_run_debug_directory(): def creator(name): from os import mkdir mkdir(name) return name return make_unique_filesystem_object("run-debug", creator=creator)[0] def open_unique_debug_file(stem, extension=""): """ :param extension: needs a leading dot. 
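    A small usage sketch (stem and extension are illustrative); the return
    value is a ``(file object, file name)`` pair:

        outf, filename = open_unique_debug_file("residuals", ".dat")
        outf.write("step 0: 1e-3")
        outf.close()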
""" return make_unique_filesystem_object( stem, extension, get_run_debug_directory()) # }}} # {{{ refcount debugging ------------------------------------------------------ class RefDebugQuit(Exception): pass def refdebug(obj, top_level=True, exclude=[]): from types import FrameType def is_excluded(o): for ex in exclude: if o is ex: return True from sys import _getframe if isinstance(o, FrameType) and \ o.f_code.co_filename == _getframe().f_code.co_filename: return True return False if top_level: try: refdebug(obj, top_level=False, exclude=exclude) except RefDebugQuit: pass else: import gc print_head = True print("-------------->") try: reflist = [x for x in gc.get_referrers(obj) if not is_excluded(x)] idx = 0 while True: if print_head: print("referring to", id(obj), type(obj), obj) print("----------------------") print_head = False r = reflist[idx] if isinstance(r, FrameType): s = str(r.f_code) else: s = str(r) print("%d/%d: " % (idx, len(reflist)), id(r), type(r), s) if isinstance(r, dict): for k, v in six.iteritems(r): if v is obj: print("...referred to from key", k) print("[d]ig, [n]ext, [p]rev, [e]val, [r]eturn, [q]uit?") response = input() if response == "d": refdebug(r, top_level=False, exclude=exclude+[reflist]) print_head = True elif response == "n": if idx + 1 < len(reflist): idx += 1 elif response == "p": if idx - 1 >= 0: idx -= 1 elif response == "e": print("type expression, obj is your object:") expr_str = input() try: res = eval(expr_str, {"obj": r}) except: from traceback import print_exc print_exc() print(res) elif response == "r": return elif response == "q": raise RefDebugQuit() else: print("WHAT YOU SAY!!! (invalid choice)") finally: print("<--------------") # }}} # {{{ interactive shell def get_shell_hist_filename(): import os _home = os.environ.get('HOME', '/') return os.path.join(_home, ".pytools-debug-shell-history") def setup_readline(): from os.path import exists hist_filename = get_shell_hist_filename() if exists(hist_filename): try: readline.read_history_file(hist_filename) except Exception: # http://docs.python.org/3/howto/pyporting.html#capturing-the-currently-raised-exception # noqa import sys e = sys.exc_info()[1] from warnings import warn warn("Error opening readline history file: %s" % e) readline.parse_and_bind("tab: complete") try: import readline import rlcompleter HAVE_READLINE = True except ImportError: HAVE_READLINE = False else: setup_readline() class SetPropagatingDict(dict): def __init__(self, source_dicts, target_dict): dict.__init__(self) for s in source_dicts[::-1]: self.update(s) self.target_dict = target_dict def __setitem__(self, key, value): dict.__setitem__(self, key, value) self.target_dict[key] = value def __delitem__(self, key): dict.__delitem__(self, key) del self.target_dict[key] def shell(locals=None, globals=None): from inspect import currentframe, getouterframes calling_frame = getouterframes(currentframe())[1][0] if locals is None: locals = calling_frame.f_locals if globals is None: globals = calling_frame.f_globals ns = SetPropagatingDict([locals, globals], locals) if HAVE_READLINE: readline.set_completer( rlcompleter.Completer(ns).complete) from code import InteractiveConsole cons = InteractiveConsole(ns) cons.interact("") readline.write_history_file(get_shell_hist_filename()) # }}} # vim: foldmethod=marker pytools-2015.1.6/pytools/decorator.py0000644000175000017500000001374412610520312020601 0ustar andreasandreas00000000000000from __future__ import absolute_import # Python decorator module # by Michele Simionato # 
http://www.phyast.pitt.edu/~micheles/python/ ## The basic trick is to generate the source code for the decorated function ## with the right signature and to evaluate it. ## Uncomment the statement 'print >> sys.stderr, func_src' in _decorate ## to understand what is going on. __all__ = ["decorator", "update_wrapper", "getinfo"] import inspect def getinfo(func): """ Returns an info dictionary containing: - name (the name of the function : str) - argnames (the names of the arguments : list) - defaults (the values of the default arguments : tuple) - signature (the signature : str) - doc (the docstring : str) - module (the module name : str) - dict (the function __dict__ : str) >>> def f(self, x=1, y=2, *args, **kw): pass >>> info = getinfo(f) >>> info["name"] 'f' >>> info["argnames"] ['self', 'x', 'y', 'args', 'kw'] >>> info["defaults"] (1, 2) >>> info["signature"] 'self, x, y, *args, **kw' """ assert inspect.ismethod(func) or inspect.isfunction(func) regargs, varargs, varkwargs, defaults = inspect.getargspec(func) argnames = list(regargs) if varargs: argnames.append(varargs) if varkwargs: argnames.append(varkwargs) signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults, formatvalue=lambda value: "")[1:-1] return dict(name=func.__name__, argnames=argnames, signature=signature, defaults = func.__defaults__, doc=func.__doc__, module=func.__module__, dict=func.__dict__, globals=func.__globals__, closure=func.__closure__) def update_wrapper(wrapper, wrapped, create=False): """ An improvement over functools.update_wrapper. By default it works the same, but if the 'create' flag is set, generates a copy of the wrapper with the right signature and update the copy, not the original. Moreovoer, 'wrapped' can be a dictionary with keys 'name', 'doc', 'module', 'dict', 'defaults'. """ if isinstance(wrapped, dict): infodict = wrapped else: # assume wrapped is a function infodict = getinfo(wrapped) assert not '_wrapper_' in infodict["argnames"], \ '"_wrapper_" is a reserved argument name!' if create: # create a brand new wrapper with the right signature src = "lambda %(signature)s: _wrapper_(%(signature)s)" % infodict # import sys; print >> sys.stderr, src # for debugging purposes wrapper = eval(src, dict(_wrapper_=wrapper)) try: wrapper.__name__ = infodict['name'] except: # Python version < 2.4 pass wrapper.__doc__ = infodict['doc'] wrapper.__module__ = infodict['module'] wrapper.__dict__.update(infodict['dict']) wrapper.__defaults__ = infodict['defaults'] return wrapper # the real meat is here def _decorator(caller, func): if not (inspect.ismethod(func) or inspect.isfunction(func)): # skip all the fanciness, just do what works return lambda *args, **kwargs: caller(func, *args, **kwargs) infodict = getinfo(func) argnames = infodict['argnames'] assert not ('_call_' in argnames or '_func_' in argnames), \ 'You cannot use _call_ or _func_ as argument names!' src = "lambda %(signature)s: _call_(_func_, %(signature)s)" % infodict dec_func = eval(src, dict(_func_=func, _call_=caller)) return update_wrapper(dec_func, func) def decorator(caller, func=None): """ General purpose decorator factory: takes a caller function as input and returns a decorator with the same attributes. A caller function is any function like this:: def caller(func, *args, **kw): # do something return func(*args, **kw) Here is an example of usage: >>> @decorator ... def chatty(f, *args, **kw): ... print "Calling %r" % f.__name__ ... return f(*args, **kw) >>> chatty.__name__ 'chatty' >>> @chatty ... def f(): pass ... 
>>> f() Calling 'f' For sake of convenience, the decorator factory can also be called with two arguments. In this casem ``decorator(caller, func)`` is just a shortcut for ``decorator(caller)(func)``. """ from warnings import warn warn("pytools.decorator is deprecated and will be removed in pytools 12. " "Use the 'decorator' module directly instead.", DeprecationWarning, stacklevel=2) if func is None: # return a decorator function return update_wrapper(lambda f : _decorator(caller, f), caller) else: # return a decorated function return _decorator(caller, func) if __name__ == "__main__": import doctest; doctest.testmod() ####################### LEGALESE ################################## ## Redistributions of source code must retain the above copyright ## notice, this list of conditions and the following disclaimer. ## Redistributions in bytecode form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in ## the documentation and/or other materials provided with the ## distribution. ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT ## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR ## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT ## HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, ## INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, ## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS ## OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR ## TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE ## USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH ## DAMAGE. pytools-2015.1.6/pytools/diskdict.py0000644000175000017500000001322112610520312020403 0ustar andreasandreas00000000000000from __future__ import absolute_import # see end of file for sqlite import from pytools import memoize import six @memoize def get_disk_dict(name, version, **kwargs): import sys import os from os.path import join from tempfile import gettempdir import getpass cache_dir = join(gettempdir(), "%s-v%s-uid%s-py%s" % ( name, version, getpass.getuser(), ".".join(str(i) for i in sys.version_info))) # {{{ ensure cache directory exists try: os.mkdir(cache_dir) except OSError as e: from errno import EEXIST if e.errno != EEXIST: raise # }}} return DiskDict(join(cache_dir, "database.sqlite"), **kwargs) class DiskDict(object): """Provides a disk-backed dictionary. Unlike :mod:`shelve`, this class allows arbitrary values for keys, at a slight performance penalty. Note that this is a dangerous game: The :func:`hash` of many objects changes between runs. In particular, ``hash(None)`` changes between runs. :class:`str`, :class:`unicode`, :class:`int`, :class:`tuple` and :class:`long` seem to be constant for a given Python executable, but they may change for a new version. So don't use this class for data that you absolutely *have* to be able to retrieve. It's fine for caches and the like, though. 
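    A minimal usage sketch (the file name and keys below are illustrative,
    and ``expensive_computation`` stands in for user code):

        d = DiskDict("/tmp/example-cache.sqlite", version_base=(1,))
        key = ("grid", 128)
        if key in d:
            result = d[key]
        else:
            result = expensive_computation()  # placeholder for user code
            d[key] = result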
""" def __init__(self, dbfilename, version_base=(), dep_modules=[], commit_interval=1): self.db_conn = sqlite.connect(dbfilename, timeout=30) try: self.db_conn.execute("select * from data;") except sqlite.OperationalError: self.db_conn.execute(""" create table data ( id integer primary key autoincrement, key_hash integer, key_pickle blob, version_hash integer, version_pickle blob, when_inserted timestamp default current_timestamp, result_pickle blob)""") def mtime(file): if not isinstance(file, six.string_types): # assume file names a module file = file.__file__ import os return os.stat(file).st_mtime from six.moves.cPickle import dumps self.version = (version_base,) + tuple( mtime(dm) for dm in dep_modules) self.version_pickle = dumps(self.version) self.version_hash = hash(self.version) self.cache = {} self.commit_interval = commit_interval self.commit_countdown = self.commit_interval def __contains__(self, key): if key in self.cache: return True else: from six.moves.cPickle import loads for key_pickle, version_pickle, result_pickle in self.db_conn.execute( "select key_pickle, version_pickle, result_pickle from data" " where key_hash = ? and version_hash = ?", (hash(key), self.version_hash)): if loads(str(key_pickle)) == key \ and loads(str(version_pickle)) == self.version: result = loads(str(result_pickle)) self.cache[key] = result return True return False def __getitem__(self, key): try: return self.cache[key] except KeyError: from six.moves.cPickle import loads for key_pickle, version_pickle, result_pickle in self.db_conn.execute( "select key_pickle, version_pickle, result_pickle from data" " where key_hash = ? and version_hash = ?", (hash(key), self.version_hash)): if loads(str(key_pickle)) == key \ and loads(str(version_pickle)) == self.version: result = loads(str(result_pickle)) self.cache[key] = result return result raise KeyError(key) def __delitem__(self, key): if key in self.cache: del self.cache[key] from six.moves.cPickle import loads for item_id, key_pickle, version_pickle in self.db_conn.execute( "select id, key_pickle, version_pickle from data" " where key_hash = ? 
and version_hash = ?", (hash(key), self.version_hash)): if loads(key_pickle) == key and loads(version_pickle) == self.version: self.db_conn.execute("delete from data where id = ?", (item_id,)) self.commit_countdown -= 1 if self.commit_countdown <= 0: self.commit_countdown = self.commit_interval self.db_conn.commit() def __setitem__(self, key, value): del self[key] self.cache[key] = value from six.moves.cPickle import dumps self.db_conn.execute("insert into data" " (key_hash, key_pickle, version_hash, " " version_pickle, result_pickle)" " values (?,?,?,?,?)", (hash(key), sqlite.Binary(dumps(key)), self.version_hash, self.version_pickle, sqlite.Binary(dumps(value)))) self.commit_countdown -= 1 if self.commit_countdown <= 0: self.commit_countdown = self.commit_interval self.db_conn.commit() try: import sqlite3 as sqlite except ImportError: try: from pysqlite2 import dbapi2 as sqlite except ImportError: import warnings warnings.warn("DiskDict will be memory-only: " "a usable version of sqlite was not found.") DiskDict = dict # noqa pytools-2015.1.6/pytools/lex.py0000644000175000017500000001121612610520312017377 0ustar andreasandreas00000000000000from __future__ import absolute_import from __future__ import print_function import re import six class RuleError(RuntimeError): def __init__(self, rule): self.Rule = rule def __str__(self): return repr(self.Rule) class InvalidTokenError(RuntimeError): def __init__(self, s, str_index): self.string = s self.index = str_index def __str__(self): return "at index %d: ...%s..." % \ (self.index, self.string[self.index:self.index+20]) class ParseError(RuntimeError): def __init__(self, msg, s, token): self.message = msg self.string = s self.Token = token def __str__(self): if self.Token is None: return "%s at end of input" % self.message else: return "%s at index %d: ...%s..." 
% \ (self.message, self.Token[2], self.string[self.Token[2]:self.Token[2]+20]) class RE(object): def __init__(self, s, flags=0): self.Content = s self.RE = re.compile(s, flags) def __repr__(self): return "RE(%s)" % self.Content def lex(lex_table, s, debug=False, match_objects=False): rule_dict = dict(lex_table) def matches_rule(rule, s, start): if debug: print("Trying", rule, "on", s[start:]) if isinstance(rule, tuple): if rule[0] == "|": for subrule in rule[1:]: length, match_obj = matches_rule( subrule, s, start) if length: return length, match_obj return 0, None else: my_match_length = 0 for subrule in rule: length, _ = matches_rule(subrule, s, start) if length: my_match_length += length start += length else: return 0, None return my_match_length, None elif isinstance(rule, six.string_types): return matches_rule(rule_dict[rule], s, start) elif isinstance(rule, RE): match_obj = rule.RE.match(s, start) if match_obj: return match_obj.end()-start, match_obj else: return 0, None else: raise RuleError(rule) result = [] i = 0 while i < len(s): rule_matched = False for name, rule in lex_table: length, match_obj = matches_rule(rule, s, i) if length: if match_objects: result.append((name, s[i:i+length], i, match_obj)) else: result.append((name, s[i:i+length], i)) i += length rule_matched = True break if not rule_matched: raise InvalidTokenError(s, i) return result class LexIterator(object): def __init__(self, lexed, raw_str, lex_index=0): self.lexed = lexed self.raw_string = raw_str self.index = lex_index def copy(self): return type(self)(self.lexed, self.raw_string, self.index) def assign(self, rhs): assert self.lexed is rhs.lexed assert self.raw_string is rhs.raw_string self.index = rhs.index def next_tag(self, i=0): return self.lexed[self.index + i][0] def next_str(self, i=0): return self.lexed[self.index + i][1] def next_match_obj(self): return self.lexed[self.index][3] def next_str_and_advance(self): result = self.next_str() self.advance() return result def advance(self): self.index += 1 def is_at_end(self, i=0): return self.index + i >= len(self.lexed) def is_next(self, tag, i=0): return self.next_tag(i) is tag def raise_parse_error(self, msg): if self.is_at_end(): raise ParseError(msg, self.raw_string, None) else: raise ParseError(msg, self.raw_string, self.lexed[self.index]) def expected(self, what_expected): if self.is_at_end(): self.raise_parse_error( "%s expected, end of input found instead" % what_expected) else: self.raise_parse_error( "%s expected, %s found instead" % (what_expected, str(self.next_tag()))) def expect_not_end(self): if self.is_at_end(): self.raise_parse_error("unexpected end of input") def expect(self, tag): self.expect_not_end() if not self.is_next(tag): self.expected(str(tag)) pytools-2015.1.6/pytools/log.py0000644000175000017500000011225612610520312017376 0ustar andreasandreas00000000000000from __future__ import division from __future__ import absolute_import from __future__ import print_function import logging import six from six.moves import range from six.moves import zip logger = logging.getLogger(__name__) # {{{ timing function def time(): """Return elapsed CPU time, as a float, in seconds.""" import os time_opt = os.environ.get("PYTOOLS_LOG_TIME") or "wall" if time_opt == "wall": from time import time return time() elif time_opt == "rusage": from resource import getrusage, RUSAGE_SELF return getrusage(RUSAGE_SELF).ru_utime else: raise RuntimeError("invalid timing method '%s'" % time_opt) # }}} # {{{ abstract logging interface class LogQuantity(object): """A 
source of loggable scalars.""" sort_weight = 0 def __init__(self, name, unit=None, description=None): self.name = name self.unit = unit self.description = description @property def default_aggregator(self): return None def tick(self): """Perform updates required at every :class:`LogManager` tick.""" pass def __call__(self): """Return the current value of the diagnostic represented by this :class:`LogQuantity` or None if no value is available. This is only called if the invocation interval calls for it. """ raise NotImplementedError class PostLogQuantity(LogQuantity): """A source of loggable scalars.""" sort_weight = 0 def prepare_for_tick(self): pass class MultiLogQuantity(object): """A source of multiple loggable scalars.""" sort_weight = 0 def __init__(self, names, units=None, descriptions=None): self.names = names if units is None: units = len(names) * [None] self.units = units if descriptions is None: descriptions = len(names) * [None] self.descriptions = descriptions @property def default_aggregators(self): return [None] * len(self.names) def tick(self): """Perform updates required at every :class:`LogManager` tick.""" pass def __call__(self): """Return an iterable of the current values of the diagnostic represented by this :class:`MultiLogQuantity`. This is only called if the invocation interval calls for it. """ raise NotImplementedError class MultiPostLogQuantity(MultiLogQuantity, PostLogQuantity): pass class DtConsumer(object): def __init__(self, dt): self.dt = dt def set_dt(self, dt): self.dt = dt class TimeTracker(DtConsumer): def __init__(self, dt): DtConsumer.__init__(self, dt) self.t = 0 def tick(self): self.t += self.dt class SimulationLogQuantity(PostLogQuantity, DtConsumer): """A source of loggable scalars that needs to know the simulation timestep.""" def __init__(self, dt, name, unit=None, description=None): PostLogQuantity.__init__(self, name, unit, description) DtConsumer.__init__(self, dt) class PushLogQuantity(LogQuantity): def __init__(self, name, unit=None, description=None): LogQuantity.__init__(self, name, unit, description) self.value = None def push_value(self, value): if self.value is not None: raise RuntimeError("can't push two values per cycle") self.value = value def __call__(self): v = self.value self.value = None return v class CallableLogQuantityAdapter(LogQuantity): """Adapt a 0-ary callable as a :class:`LogQuantity`.""" def __init__(self, callable, name, unit=None, description=None): self.callable = callable LogQuantity.__init__(self, name, unit, description) def __call__(self): return self.callable() # }}} # {{{ manager functionality class _GatherDescriptor(object): def __init__(self, quantity, interval): self.quantity = quantity self.interval = interval class _QuantityData(object): def __init__(self, unit, description, default_aggregator): self.unit = unit self.description = description self.default_aggregator = default_aggregator def _join_by_first_of_tuple(list_of_iterables): loi = [i.__iter__() for i in list_of_iterables] if not loi: return key_vals = [next(iter) for iter in loi] keys = [kv[0] for kv in key_vals] values = [kv[1] for kv in key_vals] target_key = max(keys) force_advance = False i = 0 while True: while keys[i] < target_key or force_advance: try: new_key, new_value = next(loi[i]) except StopIteration: return assert keys[i] < new_key keys[i] = new_key values[i] = new_value if new_key > target_key: target_key = new_key force_advance = False i += 1 if i >= len(loi): i = 0 if min(keys) == target_key: yield target_key, values[:] 
force_advance = True def _get_unique_id(): try: from uiid import uuid1 except ImportError: try: import hashlib checksum = hashlib.md5() except ImportError: # for Python << 2.5 import md5 checksum = md5.new() from random import Random rng = Random() rng.seed() for i in range(20): checksum.update(str(rng.randrange(1 << 30))) return checksum.hexdigest() else: return uuid1().hex def _get_random_suffix(n): characters = ( [chr(65+i) for i in range(26)] + [chr(97+i) for i in range(26)] + [chr(48+i) for i in range(10)]) from random import choice return "".join(choice(characters) for i in range(n)) def _set_up_schema(db_conn): # initialize new database db_conn.execute(""" create table quantities ( name text, unit text, description text, default_aggregator blob)""") db_conn.execute(""" create table constants ( name text, value blob)""") db_conn.execute(""" create table warnings ( rank integer, step integer, message text, category text, filename text, lineno integer )""") schema_version = 2 return schema_version class LogManager(object): """A parallel-capable diagnostic time-series logging facility. It is meant to log data from a computation, with certain log quantities available before a cycle, and certain other ones afterwards. A timeline of invocations looks as follows:: tick_before() compute... tick() tick_after() tick_before() compute... tick_after() ... In a time-dependent simulation, each group of :meth:`tick_before` :meth:`tick_after` calls captures data for a single time state, namely that in which the data may have been *before* the "compute" step. However, some data (such as the length of the timestep taken in a time-adpative method) may only be available *after* the completion of the "compute..." stage, which is why :meth:`tick_after` exists. A :class:`LogManager` logs any number of named time series of floats to a file. Non-time-series data, in the form of constants, is also supported and saved. If MPI parallelism is used, the "head rank" below always refers to rank 0. Command line tools called :command:`runalyzer` and :command:`logtool` (deprecated) are available for looking at the data in a saved log. """ def __init__(self, filename=None, mode="r", mpi_comm=None, capture_warnings=True, commit_interval=90): """Initialize this log manager instance. :param filename: If given, the filename to which this log is bound. If this database exists, the current state is loaded from it. :param mode: One of "w", "r" for write, read. "w" assumes that the database is initially empty. May also be "wu" to indicate that a unique filename should be chosen automatically. :arg mpi_comm: A :mod:`mpi4py.MPI.Comm`. If given, logs are periodically synchronized to the head node, which then writes them out to disk. :param capture_warnings: Tap the Python warnings facility and save warnings to the log file. :param commit_interval: actually perform a commit only every N times a commit is requested. 
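    A minimal usage sketch (file name and loop are illustrative; both
    names used below come from this module):

        logmgr = LogManager("run-log.sqlite", "w")
        add_general_quantities(logmgr)

        for step in range(10):
            logmgr.tick_before()
            # ... perform one step of the computation ...
            logmgr.tick_after()

        logmgr.save()
        logmgr.close()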
""" assert isinstance(mode, six.string_types), "mode must be a string" assert mode in ["w", "r", "wu"], "invalid mode" self.quantity_data = {} self.last_values = {} self.before_gather_descriptors = [] self.after_gather_descriptors = [] self.tick_count = 0 self.commit_interval = commit_interval self.commit_countdown = commit_interval self.constants = {} self.last_save_time = time() # self-timing self.start_time = time() self.t_log = 0 # parallel support self.head_rank = 0 self.mpi_comm = mpi_comm self.is_parallel = mpi_comm is not None if mpi_comm is None: self.rank = 0 else: self.rank = mpi_comm.rank self.head_rank = 0 # watch stuff self.watches = [] self.next_watch_tick = 1 self.have_nonlocal_watches = False # database binding try: import sqlite3 as sqlite except ImportError: try: from pysqlite2 import dbapi2 as sqlite except ImportError: raise ImportError("could not find a usable version of sqlite.") if filename is None: filename = ":memory:" else: if self.is_parallel: filename += "-rank%d" % self.rank while True: suffix = "" if mode == "wu": suffix = "-"+_get_random_suffix(6) self.db_conn = sqlite.connect(filename+suffix, timeout=30) self.mode = mode try: self.db_conn.execute("select * from quantities;") except sqlite.OperationalError: # we're building a new database if mode == "r": raise RuntimeError("Log database '%s' not found" % filename) self.schema_version = _set_up_schema(self.db_conn) self.set_constant("schema_version", self.schema_version) self.set_constant("is_parallel", self.is_parallel) # set globally unique run_id if self.is_parallel: self.set_constant("unique_run_id", self.mpi_comm.bcast(_get_unique_id(), root=self.head_rank)) else: self.set_constant("unique_run_id", _get_unique_id()) if self.is_parallel: self.set_constant("rank_count", self.mpi_comm.Get_size()) else: self.set_constant("rank_count", 1) else: # we've opened an existing database if mode == "w": raise RuntimeError("Log database '%s' already exists" % filename) elif mode == "wu": # try again with a new suffix continue self._load() break self.old_showwarning = None if capture_warnings: self.capture_warnings(True) def capture_warnings(self, enable=True): def _showwarning(message, category, filename, lineno, file=None, line=None): try: self.old_showwarning(message, category, filename, lineno, file, line) except TypeError: # cater to Python 2.5 and earlier self.old_showwarning(message, category, filename, lineno) if self.schema_version >= 1 and self.mode == "w": if self.schema_version >= 2: self.db_conn.execute("insert into warnings values (?,?,?,?,?,?)", (self.rank, self.tick_count, str(message), str(category), filename, lineno)) else: self.db_conn.execute("insert into warnings values (?,?,?,?,?)", (self.tick_count, str(message), str(category), filename, lineno)) import warnings if enable: if self.old_showwarning is None: pass self.old_showwarning = warnings.showwarning warnings.showwarning = _showwarning else: raise RuntimeError("Warnings capture was enabled twice") else: if self.old_showwarning is None: raise RuntimeError( "Warnings capture was disabled, but never enabled") else: warnings.showwarning = self.old_showwarning self.old_showwarning = None def _load(self): if self.mpi_comm and self.mpi_comm.rank != self.head_rank: return from pickle import loads for name, value in self.db_conn.execute("select name, value from constants"): self.constants[name] = loads(value) self.schema_version = self.constants.get("schema_version", 0) self.is_parallel = self.constants["is_parallel"] for name, unit, description, 
def_agg in self.db_conn.execute( "select name, unit, description, default_aggregator " "from quantities"): self.quantity_data[name] = _QuantityData( unit, description, loads(def_agg)) def close(self): if self.old_showwarning is not None: self.capture_warnings(False) self.save() self.db_conn.close() def get_table(self, q_name): if q_name not in self.quantity_data: raise KeyError("invalid quantity name '%s'" % q_name) from pytools.datatable import DataTable result = DataTable(["step", "rank", "value"]) for row in self.db_conn.execute( "select step, rank, value from %s" % q_name): result.insert_row(row) return result def get_warnings(self): columns = ["step", "message", "category", "filename", "lineno"] if self.schema_version >= 2: columns.insert(0, "rank") from pytools.datatable import DataTable result = DataTable(columns) for row in self.db_conn.execute( "select %s from warnings" % (", ".join(columns))): result.insert_row(row) return result def add_watches(self, watches): """Add quantities that are printed after every time step.""" from pytools import Record class WatchInfo(Record): pass for watch in watches: if isinstance(watch, tuple): display, expr = watch else: display = watch expr = watch parsed = self._parse_expr(expr) parsed, dep_data = self._get_expr_dep_data(parsed) from pytools import any self.have_nonlocal_watches = self.have_nonlocal_watches or \ any(dd.nonlocal_agg for dd in dep_data) from pymbolic import compile compiled = compile(parsed, [dd.varname for dd in dep_data]) watch_info = WatchInfo(display=display, parsed=parsed, dep_data=dep_data, compiled=compiled) self.watches.append(watch_info) def set_constant(self, name, value): """Make a named, constant value available in the log.""" existed = name in self.constants self.constants[name] = value from pickle import dumps value = buffer(dumps(value)) if existed: self.db_conn.execute("update constants set value = ? where name = ?", (value, name)) else: self.db_conn.execute("insert into constants values (?,?)", (name, value)) self._commit() def _insert_datapoint(self, name, value): if value is None: return self.last_values[name] = value try: self.db_conn.execute("insert into %s values (?,?,?)" % name, (self.tick_count, self.rank, float(value))) except: print("while adding datapoint for '%s':" % name) raise def _gather_for_descriptor(self, gd): if self.tick_count % gd.interval == 0: q_value = gd.quantity() if isinstance(gd.quantity, MultiLogQuantity): for name, value in zip(gd.quantity.names, q_value): self._insert_datapoint(name, value) else: self._insert_datapoint(gd.quantity.name, q_value) def tick(self): """Record data points from each added :class:`LogQuantity`. May also checkpoint data to disk, and/or synchronize data points to the head rank. """ from warnings import warn warn("LogManager.tick() is deprecated. " "Use LogManager.tick_{before,after}().", DeprecationWarning) self.tick_before() self.tick_after() def tick_before(self): """Record data points from each added :class:`LogQuantity` that is not an instance of :class:`PostLogQuantity`. Also, invoke :meth:`PostLogQuantity.prepare_for_tick` on :class:`PostLogQuantity` instances. """ tick_start_time = time() for gd in self.before_gather_descriptors: self._gather_for_descriptor(gd) for gd in self.after_gather_descriptors: gd.quantity.prepare_for_tick() self.t_log = time() - tick_start_time def tick_after(self): """Record data points from each added :class:`LogQuantity` that is an instance of :class:`PostLogQuantity`. May also checkpoint data to disk. 
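    In a time-stepping loop, this is typically paired with
    :meth:`tick_before` (illustrative sketch; ``dt`` is a step size chosen
    by the caller):

        set_dt(logmgr, dt)  # only needed if simulation quantities were added
        logmgr.tick_before()
        # ... advance the solution by dt ...
        logmgr.tick_after()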
""" tick_start_time = time() for gd_lst in [self.before_gather_descriptors, self.after_gather_descriptors]: for gd in gd_lst: gd.quantity.tick() for gd in self.after_gather_descriptors: self._gather_for_descriptor(gd) self.tick_count += 1 if tick_start_time - self.start_time > 15*60: save_interval = 5*60 else: save_interval = 15 if tick_start_time > self.last_save_time + save_interval: self.save() # print watches if self.tick_count == self.next_watch_tick: self._watch_tick() self.t_log += time() - tick_start_time def _commit(self): self.commit_countdown -= 1 if self.commit_countdown <= 0: self.commit_countdown = self.commit_interval self.db_conn.commit() def save(self): from sqlite3 import OperationalError try: self.db_conn.commit() except OperationalError as e: from warnings import warn warn("encountered sqlite error during commit: %s" % e) self.last_save_time = time() def add_quantity(self, quantity, interval=1): """Add an object derived from :class:`LogQuantity` to this manager.""" def add_internal(name, unit, description, def_agg): logger.debug("add log quantity '%s'" % name) if name in self.quantity_data: raise RuntimeError("cannot add the same quantity '%s' twice" % name) self.quantity_data[name] = _QuantityData(unit, description, def_agg) from pickle import dumps self.db_conn.execute("""insert into quantities values (?,?,?,?)""", ( name, unit, description, buffer(dumps(def_agg)))) self.db_conn.execute("""create table %s (step integer, rank integer, value real)""" % name) self._commit() gd = _GatherDescriptor(quantity, interval) if isinstance(quantity, PostLogQuantity): gd_list = self.after_gather_descriptors else: gd_list = self.before_gather_descriptors gd_list.append(gd) gd_list.sort(key=lambda gd: gd.quantity.sort_weight) if isinstance(quantity, MultiLogQuantity): for name, unit, description, def_agg in zip( quantity.names, quantity.units, quantity.descriptions, quantity.default_aggregators): add_internal(name, unit, description, def_agg) else: add_internal(quantity.name, quantity.unit, quantity.description, quantity.default_aggregator) def get_expr_dataset(self, expression, description=None, unit=None): """Prepare a time-series dataset for a given expression. @arg expression: A C{pymbolic} expression that may involve the time-series variables and the constants in this :class:`LogManager`. If there is data from multiple ranks for a quantity occuring in this expression, an aggregator may have to be specified. @return: C{(description, unit, table)}, where C{table} is a list of tuples C{(tick_nbr, value)}. 
Aggregators are specified as follows: - C{qty.min}, C{qty.max}, C{qty.avg}, C{qty.sum}, C{qty.norm2} - C{qty[rank_nbr]} - C{qty.loc} """ parsed = self._parse_expr(expression) parsed, dep_data = self._get_expr_dep_data(parsed) # aggregate table data for dd in dep_data: table = self.get_table(dd.name) table.sort(["step"]) dd.table = table.aggregated(["step"], "value", dd.agg_func).data # evaluate unit and description, if necessary if unit is None: from pymbolic import substitute, parse unit_dict = dict((dd.varname, dd.qdat.unit) for dd in dep_data) from pytools import all if all(v is not None for v in six.itervalues(unit_dict)): unit_dict = dict((k, parse(v)) for k, v in six.iteritems(unit_dict)) unit = substitute(parsed, unit_dict) else: unit = None if description is None: description = expression # compile and evaluate from pymbolic import compile compiled = compile(parsed, [dd.varname for dd in dep_data]) data = [] for key, values in _join_by_first_of_tuple(dd.table for dd in dep_data): try: data.append((key, compiled(*values))) except ZeroDivisionError: pass return (description, unit, data) def get_joint_dataset(self, expressions): """Return a joint data set for a list of expressions. @arg expressions: a list of either strings representing expressions directly, or triples (descr, unit, expr). In the former case, the description and the unit are found automatically, if possible. In the latter case, they are used as specified. @return: A triple C{(descriptions, units, table)}, where C{table} is a a list of C{[(tstep, (val_expr1, val_expr2,...)...]}. """ # dubs is a list of (desc, unit, table) triples as # returned by get_expr_dataset dubs = [] for expr in expressions: if isinstance(expr, str): dub = self.get_expr_dataset(expr) else: expr_descr, expr_unit, expr_str = expr dub = self.get_expr_dataset( expr_str, description=expr_descr, unit=expr_unit) dubs.append(dub) zipped_dubs = list(zip(*dubs)) zipped_dubs[2] = list( _join_by_first_of_tuple(zipped_dubs[2])) return zipped_dubs def get_plot_data(self, expr_x, expr_y, min_step=None, max_step=None): """Generate plot-ready data. :return: ``(data_x, descr_x, unit_x), (data_y, descr_y, unit_y)`` """ (descr_x, descr_y), (unit_x, unit_y), data = \ self.get_joint_dataset([expr_x, expr_y]) if min_step is not None: data = [(step, tup) for step, tup in data if min_step <= step] if max_step is not None: data = [(step, tup) for step, tup in data if step <= max_step] stepless_data = [tup for step, tup in data] if stepless_data: data_x, data_y = list(zip(*stepless_data)) else: data_x = [] data_y = [] return (data_x, descr_x, unit_x), \ (data_y, descr_y, unit_y) def plot_gnuplot(self, gp, expr_x, expr_y, **kwargs): """Plot data to Gnuplot.py. @arg gp: a Gnuplot.Gnuplot instance to which the plot is sent. @arg expr_x: an allowed argument to :meth:`get_joint_dataset`. @arg expr_y: an allowed argument to :meth:`get_joint_dataset`. @arg kwargs: keyword arguments that are directly passed on to C{Gnuplot.Data}. """ (data_x, descr_x, unit_x), (data_y, descr_y, unit_y) = \ self.get_plot_data(expr_x, expr_y) gp.xlabel("%s [%s]" % (descr_x, unit_x)) gp.ylabel("%s [%s]" % (descr_y, unit_y)) from gnuplot_py import Data gp.plot(Data(data_x, data_y, **kwargs)) def write_datafile(self, filename, expr_x, expr_y): (data_x, label_x), (data_y, label_y) = self.get_plot_data( expr_x, expr_y) outf = open(filename, "w") outf.write("# %s vs. 
%s" % (label_x, label_y)) for dx, dy in zip(data_x, data_y): outf.write("%s\t%s\n" % (repr(dx), repr(dy))) outf.close() def plot_matplotlib(self, expr_x, expr_y): from pylab import xlabel, ylabel, plot (data_x, descr_x, unit_x), (data_y, descr_y, unit_y) = \ self.get_plot_data(expr_x, expr_y) xlabel("%s [%s]" % (descr_x, unit_x)) ylabel("%s [%s]" % (descr_y, unit_y)) plot(data_x, data_y) # {{{ private functionality def _parse_expr(self, expr): from pymbolic import parse, substitute parsed = parse(expr) # substitute in global constants parsed = substitute(parsed, self.constants) return parsed def _get_expr_dep_data(self, parsed): class Nth: def __init__(self, n): self.n = n def __call__(self, lst): return lst[self.n] from pymbolic.mapper.dependency import DependencyMapper deps = DependencyMapper(include_calls=False)(parsed) # gather information on aggregation expressions dep_data = [] from pymbolic.primitives import Variable, Lookup, Subscript for dep_idx, dep in enumerate(deps): nonlocal_agg = True if isinstance(dep, Variable): name = dep.name if name == "math": continue agg_func = self.quantity_data[name].default_aggregator if agg_func is None: if self.is_parallel: raise ValueError( "must specify explicit aggregator for '%s'" % name) else: agg_func = lambda lst: lst[0] elif isinstance(dep, Lookup): assert isinstance(dep.aggregate, Variable) name = dep.aggregate.name agg_name = dep.name if agg_name == "loc": agg_func = Nth(self.rank) nonlocal_agg = False elif agg_name == "min": agg_func = min elif agg_name == "max": agg_func = max elif agg_name == "avg": from pytools import average agg_func = average elif agg_name == "sum": agg_func = sum elif agg_name == "norm2": from math import sqrt agg_func = lambda iterable: sqrt( sum(entry**2 for entry in iterable)) else: raise ValueError("invalid rank aggregator '%s'" % agg_name) elif isinstance(dep, Subscript): assert isinstance(dep.aggregate, Variable) name = dep.aggregate.name from pymbolic import evaluate agg_func = Nth(evaluate(dep.index)) qdat = self.quantity_data[name] from pytools import Record class DependencyData(Record): pass this_dep_data = DependencyData(name=name, qdat=qdat, agg_func=agg_func, varname="logvar%d" % dep_idx, expr=dep, nonlocal_agg=nonlocal_agg) dep_data.append(this_dep_data) # substitute in the "logvar" variable names from pymbolic import var, substitute parsed = substitute(parsed, dict((dd.expr, var(dd.varname)) for dd in dep_data)) return parsed, dep_data def _watch_tick(self): if not self.have_nonlocal_watches and self.rank != self.head_rank: return data_block = dict((qname, self.last_values.get(qname, 0)) for qname in six.iterkeys(self.quantity_data)) if self.mpi_comm is not None and self.have_nonlocal_watches: gathered_data = self.mpi_comm.gather(data_block, self.head_rank) else: gathered_data = [data_block] if self.rank == self.head_rank: values = {} for data_block in gathered_data: for name, value in six.iteritems(data_block): values.setdefault(name, []).append(value) def compute_watch_str(watch): try: return "%s=%g" % (watch.display, watch.compiled( *[dd.agg_func(values[dd.name]) for dd in watch.dep_data])) except ZeroDivisionError: return "%s:div0" % watch.display if self.watches: print(" | ".join( compute_watch_str(watch) for watch in self.watches)) ticks_per_sec = self.tick_count/max(1, time()-self.start_time) self.next_watch_tick = self.tick_count + int(max(1, ticks_per_sec)) if self.mpi_comm is not None and self.have_nonlocal_watches: self.next_watch_tick = self.mpi_comm.bcast( self.next_watch_tick, 
self.head_rank) # }}} # }}} # {{{ actual data loggers class _SubTimer: def __init__(self, itimer): self.itimer = itimer self.start_time = time() self.elapsed = 0 def stop(self): self.elapsed += time() - self.start_time del self.start_time return self def submit(self): self.itimer.add_time(self.elapsed) del self.elapsed class IntervalTimer(PostLogQuantity): """Records elapsed times.""" def __init__(self, name, description=None): LogQuantity.__init__(self, name, "s", description) self.elapsed = 0 def start_sub_timer(self): return _SubTimer(self) def add_time(self, t): self.start_time = time() self.elapsed += t def __call__(self): result = self.elapsed self.elapsed = 0 return result class LogUpdateDuration(LogQuantity): """Records how long the last :meth:`LogManager.tick` invocation took.""" # FIXME this is off by one tick def __init__(self, mgr, name="t_log"): LogQuantity.__init__(self, name, "s", "Time spent updating the log") self.log_manager = mgr def __call__(self): return self.log_manager.t_log class EventCounter(PostLogQuantity): """Counts events signaled by :meth:`add`.""" def __init__(self, name="interval", description=None): PostLogQuantity.__init__(self, name, "1", description) self.events = 0 def add(self, n=1): self.events += n def transfer(self, counter): self.events += counter.pop() def prepare_for_tick(self): self.events = 0 def __call__(self): result = self.events return result def time_and_count_function(f, timer, counter=None, increment=1): def inner_f(*args, **kwargs): if counter is not None: counter.add(increment) sub_timer = timer.start_sub_timer() try: return f(*args, **kwargs) finally: sub_timer.stop().submit() return inner_f class TimestepCounter(LogQuantity): """Counts the number of times :meth:`LogManager.tick` is called.""" def __init__(self, name="step"): LogQuantity.__init__(self, name, "1", "Timesteps") self.steps = 0 def __call__(self): result = self.steps self.steps += 1 return result class StepToStepDuration(PostLogQuantity): """Records the CPU time between invocations of :meth:`LogManager.tick_before` and :meth:`LogManager.tick_after`. """ def __init__(self, name="t_2step"): PostLogQuantity.__init__(self, name, "s", "Step-to-step duration") self.last_start_time = None self.last2_start_time = None def prepare_for_tick(self): self.last2_start_time = self.last_start_time self.last_start_time = time() def __call__(self): if self.last2_start_time is None: return None else: return self.last_start_time - self.last2_start_time class TimestepDuration(PostLogQuantity): """Records the CPU time between the starts of time steps. :meth:`LogManager.tick_before` and :meth:`LogManager.tick_after`. """ # We would like to run last, so that if log gathering takes any # significant time, we catch that, too. (CUDA sync-on-time-taking, # I'm looking at you.) 
sort_weight = 1000 def __init__(self, name="t_step"): PostLogQuantity.__init__(self, name, "s", "Time step duration") def prepare_for_tick(self): self.last_start = time() def __call__(self): now = time() result = now - self.last_start del self.last_start return result class CPUTime(LogQuantity): """Records (monotonically increasing) CPU time.""" def __init__(self, name="t_cpu"): LogQuantity.__init__(self, name, "s", "Wall time") self.start = time() def __call__(self): return time()-self.start class ETA(LogQuantity): """Records an estimate of how long the computation will still take.""" def __init__(self, total_steps, name="t_eta"): LogQuantity.__init__(self, name, "s", "Estimated remaining duration") self.steps = 0 self.total_steps = total_steps self.start = time() def __call__(self): fraction_done = self.steps/self.total_steps self.steps += 1 time_spent = time()-self.start if fraction_done > 1e-9: return time_spent/fraction_done-time_spent else: return 0 def add_general_quantities(mgr): """Add generally applicable :class:`LogQuantity` objects to C{mgr}.""" mgr.add_quantity(TimestepDuration()) mgr.add_quantity(StepToStepDuration()) mgr.add_quantity(CPUTime()) mgr.add_quantity(LogUpdateDuration(mgr)) mgr.add_quantity(TimestepCounter()) class SimulationTime(TimeTracker, LogQuantity): """Record (monotonically increasing) simulation time.""" def __init__(self, dt, name="t_sim", start=0): LogQuantity.__init__(self, name, "s", "Simulation Time") TimeTracker.__init__(self, dt) def __call__(self): return self.t class Timestep(SimulationLogQuantity): """Record the magnitude of the simulated time step.""" def __init__(self, dt, name="dt", unit="s"): SimulationLogQuantity.__init__(self, dt, name, unit, "Simulation Timestep") def __call__(self): return self.dt def set_dt(mgr, dt): """Set the simulation timestep on :class:`LogManager` C{mgr} to C{dt}.""" for gd_lst in [mgr.before_gather_descriptors, mgr.after_gather_descriptors]: for gd in gd_lst: if isinstance(gd.quantity, DtConsumer): gd.quantity.set_dt(dt) def add_simulation_quantities(mgr, dt=None): """Add :class:`LogQuantity` objects relating to simulation time.""" if dt is not None: from warnings import warn warn("Specifying dt ahead of time is a deprecated practice. 
" "Use pytools.log.set_dt() instead.") mgr.add_quantity(SimulationTime(dt)) mgr.add_quantity(Timestep(dt)) def add_run_info(mgr): """Add generic run metadata, such as command line, host, and time.""" import sys mgr.set_constant("cmdline", " ".join(sys.argv)) from socket import gethostname mgr.set_constant("machine", gethostname()) from time import localtime, strftime, time mgr.set_constant("date", strftime("%a, %d %b %Y %H:%M:%S %Z", localtime())) mgr.set_constant("unixtime", time()) # }}} # vim: foldmethod=marker pytools-2015.1.6/pytools/mpi.py0000644000175000017500000000133312610520312017373 0ustar andreasandreas00000000000000from __future__ import absolute_import def check_for_mpi_relaunch(argv): if argv[1] != "--mpi-relaunch": return from pickle import loads f, args, kwargs = loads(argv[2]) f(*args, **kwargs) import sys sys.exit() def run_with_mpi_ranks(py_script, ranks, callable, args=(), kwargs=None): if kwargs is None: kwargs = {} import sys import os newenv = os.environ.copy() newenv["PYTOOLS_RUN_WITHIN_MPI"] = "1" from pickle import dumps callable_and_args = dumps((callable, args, kwargs)) from subprocess import check_call check_call(["mpirun", "-np", str(ranks), sys.executable, py_script, "--mpi-relaunch", callable_and_args], env=newenv) pytools-2015.1.6/pytools/mpiwrap.py0000644000175000017500000000073412610520312020271 0ustar andreasandreas00000000000000"""See pytools.prefork for this module's reason for being.""" from __future__ import absolute_import import mpi4py.rc mpi4py.rc.initialize = False import pytools.prefork pytools.prefork.enable_prefork() from mpi4py.MPI import * if Is_initialized(): raise RuntimeError("MPI already initialized before MPI wrapper import") def InitWithAutoFinalize(*args, **kwargs): result = Init(*args, **kwargs) import atexit atexit.register(Finalize) return result pytools-2015.1.6/pytools/obj_array.py0000644000175000017500000001020612610520312020555 0ustar andreasandreas00000000000000from __future__ import absolute_import, division import numpy as np from pytools import my_decorator as decorator, MovedFunctionDeprecationWrapper def gen_len(expr): from pytools.obj_array import is_obj_array if is_obj_array(expr): return len(expr) else: return 1 def gen_slice(expr, slice): result = expr[slice] if len(result) == 1: return result[0] else: return result def is_obj_array(val): try: return isinstance(val, np.ndarray) and val.dtype == object except AttributeError: return False def to_obj_array(ary): ls = log_shape(ary) result = np.empty(ls, dtype=object) from pytools import indices_in_shape for i in indices_in_shape(ls): result[i] = ary[i] return result def is_field_equal(a, b): if is_obj_array(a): return is_obj_array(b) and (a.shape == b.shape) and (a == b).all() else: return not is_obj_array(b) and a == b def make_obj_array(res_list): result = np.empty((len(res_list),), dtype=object) for i, v in enumerate(res_list): result[i] = v return result def setify_field(f): from hedge.tools import is_obj_array if is_obj_array(f): return set(f) else: return set([f]) def obj_array_to_hashable(f): if is_obj_array(f): return tuple(f) else: return f hashable_field = MovedFunctionDeprecationWrapper(obj_array_to_hashable) def obj_array_equal(a, b): a_is_oa = is_obj_array(a) assert a_is_oa == is_obj_array(b) if a_is_oa: return np.array_equal(a, b) else: return a == b field_equal = MovedFunctionDeprecationWrapper(obj_array_equal) def join_fields(*args): res_list = [] for arg in args: if isinstance(arg, list): res_list.extend(arg) elif isinstance(arg, np.ndarray): if 
log_shape(arg) == (): res_list.append(arg) else: res_list.extend(arg.flat) else: res_list.append(arg) return make_obj_array(res_list) def log_shape(array): """Returns the "logical shape" of the array. The "logical shape" is the shape that's left when the node-depending dimension has been eliminated.""" try: if array.dtype.char == "O": return array.shape else: return array.shape[:-1] except AttributeError: return () def with_object_array_or_scalar(f, field, obj_array_only=False): if obj_array_only: if is_obj_array(field): ls = field.shape else: ls = () else: ls = log_shape(field) if ls != (): from pytools import indices_in_shape result = np.zeros(ls, dtype=object) for i in indices_in_shape(ls): result[i] = f(field[i]) return result else: return f(field) as_oarray_func = decorator(with_object_array_or_scalar) def with_object_array_or_scalar_n_args(f, *args): oarray_arg_indices = [] for i, arg in enumerate(args): if is_obj_array(arg): oarray_arg_indices.append(i) if not oarray_arg_indices: return f(*args) leading_oa_index = oarray_arg_indices[0] ls = log_shape(args[leading_oa_index]) if ls != (): from pytools import indices_in_shape result = np.zeros(ls, dtype=object) new_args = list(args) for i in indices_in_shape(ls): for arg_i in oarray_arg_indices: new_args[arg_i] = args[arg_i][i] result[i] = f(*new_args) return result else: return f(*args) as_oarray_func_n_args = decorator(with_object_array_or_scalar_n_args) def cast_field(field, dtype): return with_object_array_or_scalar( lambda f: f.astype(dtype), field) def oarray_real(ary): return with_object_array_or_scalar(lambda x: x.real, ary) def oarray_imag(ary): return with_object_array_or_scalar(lambda x: x.imag, ary) def oarray_real_copy(ary): return with_object_array_or_scalar(lambda x: x.real.copy(), ary) def oarray_imag_copy(ary): return with_object_array_or_scalar(lambda x: x.imag.copy(), ary) pytools-2015.1.6/pytools/persistent_dict.py0000644000175000017500000002753012610520312022020 0ustar andreasandreas00000000000000"""Generic persistent, concurrent dictionary-like facility.""" from __future__ import division, with_statement from __future__ import absolute_import import six __copyright__ = "Copyright (C) 2011,2014 Andreas Kloeckner" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import logging logger = logging.getLogger(__name__) import sys import os import errno try: import hashlib new_hash = hashlib.sha256 except ImportError: # for Python << 2.5 import sha new_hash = sha.new def _erase_dir(dir): from os import listdir, unlink, rmdir from os.path import join, isdir for name in listdir(dir): sub_name = join(dir, name) if isdir(sub_name): _erase_dir(sub_name) else: unlink(sub_name) rmdir(dir) def update_checksum(checksum, obj): if isinstance(obj, six.text_type): checksum.update(obj.encode("utf8")) else: checksum.update(obj) # {{{ cleanup managers class CleanupBase(object): pass class CleanupManager(CleanupBase): def __init__(self): self.cleanups = [] def register(self, c): self.cleanups.insert(0, c) def clean_up(self): for c in self.cleanups: c.clean_up() def error_clean_up(self): for c in self.cleanups: c.error_clean_up() class LockManager(CleanupBase): def __init__(self, cleanup_m, container_dir): if container_dir is not None: self.lock_file = os.path.join(container_dir, "lock") attempts = 0 while True: try: self.fd = os.open(self.lock_file, os.O_CREAT | os.O_WRONLY | os.O_EXCL) break except OSError: pass from time import sleep sleep(1) attempts += 1 if attempts > 10: from warnings import warn warn("could not obtain lock--delete '%s' if necessary" % self.lock_file) cleanup_m.register(self) def clean_up(self): import os os.close(self.fd) os.unlink(self.lock_file) def error_clean_up(self): pass class ItemDirManager(CleanupBase): def __init__(self, cleanup_m, path): from os import mkdir import errno self.path = path try: mkdir(self.path) except OSError as e: if e.errno != errno.EEXIST: raise self.existed = True else: cleanup_m.register(self) self.existed = False def sub(self, n): from os.path import join return join(self.path, n) def reset(self): try: _erase_dir(self.path) except OSError as e: if e.errno != errno.ENOENT: raise try: os.mkdir(self.path) except OSError as e: if e.errno != errno.EEXIST: raise def clean_up(self): pass def error_clean_up(self): _erase_dir(self.path) # }}} # {{{ key generation class KeyBuilder(object): def rec(self, key_hash, key): try: method = key.update_persistent_hash except AttributeError: pass else: method(key_hash, self) return try: method = getattr(self, "update_for_"+type(key).__name__) except AttributeError: pass else: method(key_hash, key) return raise TypeError("unsupported type for persistent hash keying: %s" % type(key)) def __call__(self, key): key_hash = new_hash() self.rec(key_hash, key) return key_hash.hexdigest() # {{{ updaters def update_for_int(self, key_hash, key): key_hash.update(str(key).encode("utf8")) update_for_long = update_for_int update_for_bool = update_for_int def update_for_float(self, key_hash, key): key_hash.update(repr(key).encode("utf8")) if sys.version_info >= (3,): def update_for_str(self, key_hash, key): key_hash.update(key.encode('utf8')) def update_for_bytes(self, key_hash, key): key_hash.update(key) else: def update_for_str(self, key_hash, key): key_hash.update(key) def update_for_unicode(self, key_hash, key): key_hash.update(key.encode('utf8')) def update_for_tuple(self, key_hash, key): for obj_i in key: self.rec(key_hash, obj_i) def update_for_frozenset(self, key_hash, key): for set_key in sorted(key): self.rec(key_hash, set_key) def update_for_NoneType(self, key_hash, key): # noqa key_hash.update("".encode('utf8')) def update_for_dtype(self, key_hash, key): return key.str.encode("utf8") # }}} # }}} # {{{ top-level class NoSuchEntryError(KeyError): pass class PersistentDict(object): def 
__init__(self, identifier, key_builder=None, container_dir=None): """ :arg identifier: a file-name-compatible string identifying this dictionary :arg key_builder: a subclass of :class:`KeyBuilder` """ self.identifier = identifier if key_builder is None: key_builder = KeyBuilder() self.key_builder = key_builder from os.path import join if container_dir is None: import appdirs container_dir = join( appdirs.user_cache_dir("pytools", "pytools"), "pdict-v2-%s-py%s" % ( identifier, ".".join(str(i) for i in sys.version_info),)) self.container_dir = container_dir self._make_container_dir() def _make_container_dir(self): # {{{ ensure container directory exists try: os.makedirs(self.container_dir) except OSError as e: from errno import EEXIST if e.errno != EEXIST: raise # }}} def store(self, key, value, info_files={}): hexdigest_key = self.key_builder(key) cleanup_m = CleanupManager() try: try: LockManager(cleanup_m, self.container_dir) from os.path import join item_dir_m = ItemDirManager(cleanup_m, join(self.container_dir, hexdigest_key)) if item_dir_m.existed: item_dir_m.reset() for info_name, info_value in six.iteritems(info_files): info_path = item_dir_m.sub("info_"+info_name) with open(info_path, "wt") as outf: outf.write(info_value) from six.moves.cPickle import dump, HIGHEST_PROTOCOL value_path = item_dir_m.sub("contents") with open(value_path, "wb") as outf: dump(value, outf, protocol=HIGHEST_PROTOCOL) # Write key last, so that if the reader below key_path = item_dir_m.sub("key") with open(key_path, "wb") as outf: dump(key, outf, protocol=HIGHEST_PROTOCOL) except: cleanup_m.error_clean_up() raise finally: cleanup_m.clean_up() def fetch(self, key): hexdigest_key = self.key_builder(key) from os.path import join, isdir item_dir = join(self.container_dir, hexdigest_key) if not isdir(item_dir): raise NoSuchEntryError(key) cleanup_m = CleanupManager() try: try: LockManager(cleanup_m, self.container_dir) item_dir_m = ItemDirManager(cleanup_m, item_dir) key_path = item_dir_m.sub("key") value_path = item_dir_m.sub("contents") from six.moves.cPickle import load # {{{ load key file exc = None try: with open(key_path, "rb") as inf: read_key = load(inf) except IOError as e: exc = e except EOFError as e: exc = e if exc is not None: item_dir_m.reset() from warnings import warn warn("pytools.persistent_dict.PersistentDict(%s) " "encountered an invalid " "key file for key %s. Entry deleted." % (self.identifier, hexdigest_key)) raise NoSuchEntryError(key) # }}} if read_key != key: # Key collision, oh well. logger.debug("key collsion in cache at '%s'" % self.container_dir) raise NoSuchEntryError(key) # {{{ load value exc = None try: with open(value_path, "rb") as inf: read_contents = load(inf) except IOError as e: exc = e except EOFError as e: exc = e if exc is not None: item_dir_m.reset() from warnings import warn warn("pytools.persistent_dict.PersistentDict(%s) " "encountered an invalid " "key file for key %s. Entry deleted." 
% (self.identifier, hexdigest_key)) raise NoSuchEntryError(key) # }}} return read_contents except: cleanup_m.error_clean_up() raise finally: cleanup_m.clean_up() def remove(self, key): hexdigest_key = self.key_builder(key) from os.path import join, isdir item_dir = join(self.container_dir, hexdigest_key) if not isdir(item_dir): raise NoSuchEntryError(key) cleanup_m = CleanupManager() try: try: LockManager(cleanup_m, self.container_dir) item_dir_m = ItemDirManager(cleanup_m, item_dir) item_dir_m.reset() except: cleanup_m.error_clean_up() raise finally: cleanup_m.clean_up() def __getitem__(self, key): return self.fetch(key) def __setitem__(self, key, value): return self.store(key, value) def __delitem__(self, key): self.remove(key) def clear(self): try: _erase_dir(self.container_dir) except OSError as e: if e.errno != errno.ENOENT: raise self._make_container_dir() # }}} # vim: foldmethod=marker pytools-2015.1.6/pytools/prefork.py0000644000175000017500000001230512610520312020261 0ustar andreasandreas00000000000000"""OpenMPI, once initialized, prohibits forking. This helper module allows forking *one* helper child process before OpenMPI initialization; that child can then do the forking for the fork-challenged parent process. Since none of this is MPI-specific, it got parked in pytools. """ from __future__ import absolute_import class ExecError(OSError): pass class DirectForker: @staticmethod def call(cmdline, cwd=None): from subprocess import call try: return call(cmdline, cwd=cwd) except OSError as e: raise ExecError("error invoking '%s': %s" % ( " ".join(cmdline), e)) @staticmethod def call_capture_stdout(cmdline, cwd=None): from subprocess import Popen, PIPE try: return Popen(cmdline, cwd=cwd, stdin=PIPE, stdout=PIPE, stderr=PIPE).communicate()[0] except OSError as e: raise ExecError("error invoking '%s': %s" % ( " ".join(cmdline), e)) @staticmethod def call_capture_output(cmdline, cwd=None, error_on_nonzero=True): """ :returns: a tuple (return code, stdout_data, stderr_data). """ from subprocess import Popen, PIPE try: popen = Popen(cmdline, cwd=cwd, stdin=PIPE, stdout=PIPE, stderr=PIPE) stdout_data, stderr_data = popen.communicate() if error_on_nonzero and popen.returncode: raise ExecError("status %d invoking '%s': %s" % (popen.returncode, " ".join(cmdline), stderr_data)) return popen.returncode, stdout_data, stderr_data except OSError as e: raise ExecError("error invoking '%s': %s" % ( " ".join(cmdline), e)) def _send_packet(sock, data): from struct import pack from six.moves.cPickle import dumps packet = dumps(data) sock.sendall(pack("I", len(packet))) sock.sendall(packet) def _recv_packet(sock, who="Process", partner="other end"): from struct import calcsize, unpack size_bytes_size = calcsize("I") size_bytes = sock.recv(size_bytes_size) if len(size_bytes) < size_bytes_size: from warnings import warn warn("%s exiting upon apparent death of %s" % (who, partner)) raise SystemExit size, = unpack("I", size_bytes) packet = b"" while len(packet) < size: packet += sock.recv(size - len(packet)) from six.moves.cPickle import loads return loads(packet) def _fork_server(sock): import signal # ignore keyboard interrupts, we'll get notified by the parent.
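    # Request/reply protocol handled by the loop below: each request is a
    # pickled (func_name, args, kwargs) tuple read with _recv_packet(); it is
    # dispatched to the matching DirectForker method (or the local quit()),
    # and the reply sent back with _send_packet() is ("ok", result) on success
    # or ("exception", e) if the call raised.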
signal.signal(signal.SIGINT, signal.SIG_IGN) quitflag = [False] def quit(): quitflag[0] = True funcs = { "quit": quit, "call": DirectForker.call, "call_capture_stdout": DirectForker.call_capture_stdout, "call_capture_output": DirectForker.call_capture_output, } try: while not quitflag[0]: func_name, args, kwargs = _recv_packet(sock, who="Prefork server", partner="parent") try: result = funcs[func_name](*args, **kwargs) except Exception as e: _send_packet(sock, ("exception", e)) else: _send_packet(sock, ("ok", result)) finally: sock.close() import os os._exit(0) class IndirectForker: def __init__(self, server_pid, sock): self.server_pid = server_pid self.socket = sock def _remote_invoke(self, name, *args, **kwargs): _send_packet(self.socket, (name, args, kwargs)) status, result = _recv_packet(self.socket, who="Prefork client", partner="prefork server") if status == "exception": raise result elif status == "ok": return result def _quit(self): self._remote_invoke("quit") from os import waitpid waitpid(self.server_pid, 0) def call(self, cmdline, cwd=None): return self._remote_invoke("call", cmdline, cwd) def call_capture_stdout(self, cmdline, cwd=None): return self._remote_invoke("call_capture_stdout", cmdline, cwd) def call_capture_output(self, cmdline, cwd=None, error_on_nonzero=True): return self._remote_invoke("call_capture_output", cmdline, cwd, error_on_nonzero) def enable_prefork(): if isinstance(forker[0], IndirectForker): return from socket import socketpair s_parent, s_child = socketpair() from os import fork fork_res = fork() if fork_res == 0: # child s_parent.close() _fork_server(s_child) else: s_child.close() forker[0] = IndirectForker(fork_res, s_parent) import atexit atexit.register(forker[0]._quit) forker = [DirectForker()] def call(cmdline, cwd=None): return forker[0].call(cmdline, cwd) def call_capture_stdout(cmdline, cwd=None): from warnings import warn warn("call_capture_stdout is deprecated: use call_capture_output instead", stacklevel=2) return forker[0].call_capture_stdout(cmdline, cwd) def call_capture_output(cmdline, cwd=None, error_on_nonzero=True): return forker[0].call_capture_output(cmdline, cwd, error_on_nonzero) pytools-2015.1.6/pytools/py_codegen.py0000644000175000017500000000714712610520312020733 0ustar andreasandreas00000000000000from __future__ import division, with_statement __copyright__ = "Copyright (C) 2009-2013 Andreas Kloeckner" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" # loosely based on # http://effbot.org/zone/python-code-generator.htm class Indentation(object): def __init__(self, generator): self.generator = generator def __enter__(self): self.generator.indent() def __exit__(self, exc_type, exc_val, exc_tb): self.generator.dedent() class PythonCodeGenerator(object): def __init__(self): self.preamble = [] self.code = [] self.level = 0 def extend(self, sub_generator): for line in sub_generator.code: self.code.append(" "*(4*self.level) + line) def get(self): result = "\n".join(self.code) if self.preamble: result = "\n".join(self.preamble) + "\n" + result return result def add_to_preamble(self, s): self.preamble.append(s) def __call__(self, s): if not s.strip(): self.code.append("") else: if "\n" in s: s = remove_common_indentation(s) for l in s.split("\n"): self.code.append(" "*(4*self.level) + l) def indent(self): self.level += 1 def dedent(self): if self.level == 0: raise RuntimeError("internal error in python code generator") self.level -= 1 class PythonFunctionGenerator(PythonCodeGenerator): def __init__(self, name, args): PythonCodeGenerator.__init__(self) self.name = name self("def %s(%s):" % (name, ", ".join(args))) self.indent() def get_function(self): result_dict = {} source_text = self.get() exec(compile(source_text.rstrip()+"\n", "" % self.name, "exec"), result_dict) func = result_dict[self.name] result_dict["_MODULE_SOURCE_CODE"] = source_text return func # {{{ remove common indentation def remove_common_indentation(code, require_leading_newline=True): if "\n" not in code: return code if require_leading_newline and not code.startswith("\n"): return code lines = code.split("\n") while lines[0].strip() == "": lines.pop(0) while lines[-1].strip() == "": lines.pop(-1) if lines: base_indent = 0 while lines[0][base_indent] in " \t": base_indent += 1 for line in lines[1:]: if line[:base_indent].strip(): raise ValueError("inconsistent indentation") return "\n".join(line[base_indent:] for line in lines) # }}} pytools-2015.1.6/pytools/spatial_btree.py0000644000175000017500000001230012610520312021420 0ustar andreasandreas00000000000000from __future__ import division from __future__ import absolute_import from six.moves import range def do_boxes_intersect(xxx_todo_changeme, xxx_todo_changeme1): (bl1,tr1) = xxx_todo_changeme (bl2,tr2) = xxx_todo_changeme1 (dimension,) = bl1.shape for i in range(0, dimension): if max(bl1[i], bl2[i]) > min(tr1[i], tr2[i]): return False return True def _get_elements_bounding_box(elements): import numpy if len(elements) == 0: raise RuntimeError("Cannot get the bounding box of no elements.") bboxes = [box for el,box in elements] bottom_lefts = [bl for bl,tr in bboxes] top_rights = [tr for bl,tr in bboxes] return numpy.minimum.reduce(bottom_lefts), numpy.minimum.reduce(top_rights) def make_buckets(bottom_left, top_right, allbuckets): import numpy (dimensions,) = bottom_left.shape half = (top_right - bottom_left) / 2. def do(dimension, pos): if dimension == dimensions: origin = bottom_left + pos*half bucket = SpatialBinaryTreeBucket(origin, origin + half) allbuckets.append(bucket) return bucket else: pos[dimension] = 0 first = do(dimension + 1, pos) pos[dimension] = 1 second = do(dimension + 1, pos) return [first, second] return do(0, numpy.zeros((dimensions,), numpy.float64)) class SpatialBinaryTreeBucket: """This class represents one bucket in a spatial binary tree. It automatically decides whether it needs to create more subdivisions beneath itself or not. 
:ivar elements: a list of tuples *(element, bbox)* where bbox is again a tuple *(lower_left, upper_right)* of :class:`numpy.ndarray` instances satisfying *(lower_right <= upper_right).all()*. """ def __init__(self, bottom_left, top_right): """:param bottom_left: A :mod: 'numpy' array of the minimal coordinates of the box being partitioned. :param top_right: A :mod: 'numpy' array of the maximal coordinates of the box being partitioned.""" self.elements = [] self.bottom_left = bottom_left self.top_right = top_right self.center = (bottom_left + top_right) / 2 # As long as buckets is None, there are no subdivisions self.buckets = None self.elements = [] def insert(self, element, bbox): """Insert an element into the spatial tree. :param element: the element to be stored in the retrieval data structure. It is treated as opaque and no assumptions are made on it. :param bbox: A bounding box supplied as a tuple *lower_left, upper_right* of :mod:`numpy` vectors, such that *(lower_right <= upper_right).all()*. Despite these names, the bounding box (and this entire data structure) may be of any dimension. """ def insert_into_subdivision(element, bbox): for bucket in self.all_buckets: if do_boxes_intersect((bucket.bottom_left, bucket.top_right), bbox): bucket.insert(element, bbox) (dimensions,) = self.bottom_left.shape if self.buckets is None: # No subdivisions yet. if len(self.elements) > 8 * 2**dimensions: # Too many elements. Need to subdivide. self.all_buckets = [] self.buckets = make_buckets(self.bottom_left, self.top_right, self.all_buckets) # Move all elements from the full bucket into the new finer ones for el, el_bbox in self.elements: insert_into_subdivision(el, el_bbox) # Free up some memory. Elements are now stored in the # subdivision, so we don't need them here any more. del self.elements insert_into_subdivision(element, bbox) else: # Simple: self.elements.append((element, bbox)) else: # Go find which sudivision to place element insert_into_subdivision(element, bbox) def generate_matches(self, point): if self.buckets: # We have subdivisions. Use them. (dimensions,) = point.shape bucket = self.buckets for dim in range(dimensions): if point[dim] < self.center[dim]: bucket = bucket[0] else: bucket = bucket[1] for result in bucket.generate_matches(point): yield result else: # We don't. Perform linear search. for el, bbox in self.elements: yield el def visualize(self, file): file.write("%f %f\n" % (self.bottom_left[0], self.bottom_left[1])); file.write("%f %f\n" % (self.top_right[0], self.bottom_left[1])); file.write("%f %f\n" % (self.top_right[0], self.top_right[1])); file.write("%f %f\n" % (self.bottom_left[0], self.top_right[1])); file.write("%f %f\n\n" % (self.bottom_left[0], self.bottom_left[1])); if self.buckets: for i in self.all_buckets: i.visualize(file) pytools-2015.1.6/pytools/stopwatch.py0000644000175000017500000000354312610520312020627 0ustar andreasandreas00000000000000from __future__ import division from __future__ import absolute_import from __future__ import print_function import time import pytools class StopWatch: def __init__(self): self.Elapsed = 0. 
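        # Elapsed accumulates time from completed start()/stop() intervals;
        # LastStart holds the wall-clock start time while the watch is running
        # and is None whenever it is stopped.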
self.LastStart = None def start(self): assert self.LastStart is None self.LastStart = time.time() return self def stop(self): assert self.LastStart is not None self.Elapsed += time.time() - self.LastStart self.LastStart = None return self def elapsed(self): if self.LastStart: return time.time() - self.LastStart + self.Elapsed else: return self.Elapsed class Job: def __init__(self, name): self.Name = name self.StopWatch = StopWatch().start() if self.is_visible(): print("%s..." % name) def done(self): elapsed = self.StopWatch.elapsed() JOB_TIMES[self.Name] += elapsed if self.is_visible(): print(" " * (len(self.Name) + 2), elapsed, "seconds") def is_visible(self): if PRINT_JOBS.get(): return not self.Name in HIDDEN_JOBS else: return self.Name in VISIBLE_JOBS class EtaEstimator: def __init__(self, total_steps): self.stopwatch = StopWatch().start() self.total_steps = total_steps assert total_steps > 0 def estimate(self, done): fraction_done = done/self.total_steps time_spent = self.stopwatch.elapsed() if fraction_done > 1e-5: return time_spent/fraction_done-time_spent else: return None def print_job_summary(): for key in JOB_TIMES: print(key, " " * (50-len(key)), JOB_TIMES[key]) HIDDEN_JOBS = [] VISIBLE_JOBS = [] JOB_TIMES = pytools.DictionaryWithDefault(lambda x: 0) PRINT_JOBS = pytools.Reference(True) pytools-2015.1.6/pytools/test.py0000644000175000017500000000040012610520312017557 0ustar andreasandreas00000000000000from __future__ import absolute_import try: from py.test import mark as mark_test except ImportError: class _Mark: def __getattr__(self, name): def dec(f): return f return dec mark_test = _Mark() pytools-2015.1.6/pytools.egg-info/0000755000175000017500000000000012613227172017741 5ustar andreasandreas00000000000000pytools-2015.1.6/pytools.egg-info/PKG-INFO0000644000175000017500000000375112613227171021043 0ustar andreasandreas00000000000000Metadata-Version: 1.1 Name: pytools Version: 2015.1.6 Summary: A collection of tools for Python Home-page: http://pypi.python.org/pypi/pytools Author: Andreas Kloeckner Author-email: inform@tiker.net License: MIT Description: Pytools is a big bag of things that are "missing" from the Python standard library. This is mainly a dependency of my other software packages, and is probably of little interest to you unless you use those. If you're curious nonetheless, here's what's on offer: * A ton of small tool functions such as `len_iterable`, `argmin`, tuple generation, permutation generation, ASCII table pretty printing, GvR's mokeypatch_xxx() hack, the elusive `flatten`, and much more. * Michele Simionato's decorator module * A time-series logging module, `pytools.log`. * Batch job submission, `pytools.batchjob`. * A lexer, `pytools.lex`. 
Platform: UNKNOWN Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Other Audience Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: MIT License Classifier: Natural Language :: English Classifier: Programming Language :: Python Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Scientific/Engineering :: Information Analysis Classifier: Topic :: Scientific/Engineering :: Mathematics Classifier: Topic :: Scientific/Engineering :: Visualization Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Utilities pytools-2015.1.6/pytools.egg-info/SOURCES.txt0000644000175000017500000000130012613227172021617 0ustar andreasandreas00000000000000MANIFEST.in README setup.cfg setup.py pytools/__init__.py pytools/arithmetic_container.py pytools/batchjob.py pytools/convergence.py pytools/datatable.py pytools/debug.py pytools/decorator.py pytools/diskdict.py pytools/lex.py pytools/log.py pytools/mpi.py pytools/mpiwrap.py pytools/obj_array.py pytools/persistent_dict.py pytools/prefork.py pytools/py_codegen.py pytools/spatial_btree.py pytools/stopwatch.py pytools/test.py pytools.egg-info/PKG-INFO pytools.egg-info/SOURCES.txt pytools.egg-info/dependency_links.txt pytools.egg-info/pbr.json pytools.egg-info/requires.txt pytools.egg-info/top_level.txt test/test_data_table.py test/test_math_stuff.py test/test_persistent_dict.py test/test_pytools.pypytools-2015.1.6/pytools.egg-info/dependency_links.txt0000644000175000017500000000000112613227171024006 0ustar andreasandreas00000000000000 pytools-2015.1.6/pytools.egg-info/pbr.json0000644000175000017500000000005712611473324021421 0ustar andreasandreas00000000000000{"is_release": false, "git_version": "1de81c9"}pytools-2015.1.6/pytools.egg-info/requires.txt0000644000175000017500000000005212613227171022335 0ustar andreasandreas00000000000000decorator>=3.2.0 appdirs>=1.4.0 six>=1.8.0pytools-2015.1.6/pytools.egg-info/top_level.txt0000644000175000017500000000001012613227171022461 0ustar andreasandreas00000000000000pytools pytools-2015.1.6/test/0000755000175000017500000000000012613227172015515 5ustar andreasandreas00000000000000pytools-2015.1.6/test/test_data_table.py0000644000175000017500000000521212610520312021173 0ustar andreasandreas00000000000000from __future__ import division from __future__ import absolute_import from six.moves import range from six.moves import zip # data from Wikipedia "join" article def get_dept_table(): from pytools.datatable import DataTable dept_table = DataTable(["id", "name"]) dept_table.insert_row((31, "Sales")) dept_table.insert_row((33, "Engineering")) dept_table.insert_row((34, "Clerical")) dept_table.insert_row((35, "Marketing")) return dept_table def get_employee_table(): from pytools.datatable import DataTable employee_table = DataTable(["lastname", "dept"]) employee_table.insert_row(("Rafferty", 31)) employee_table.insert_row(("Jones", 33)) employee_table.insert_row(("Jasper", 36)) employee_table.insert_row(("Steinberg", 33)) employee_table.insert_row(("Robinson", 34)) employee_table.insert_row(("Smith", 34)) return employee_table def test_len(): et = 
get_employee_table() assert len(et) == 6 def test_iter(): et = get_employee_table() count = 0 for row in et: count += 1 assert len(row) == 2 assert count == 6 def test_insert_and_get(): et = get_employee_table() et.insert(dept=33, lastname="Kloeckner") assert et.get(lastname="Kloeckner").dept == 33 def test_filtered(): et = get_employee_table() assert len(et.filtered(dept=33)) == 2 assert len(et.filtered(dept=34)) == 2 def test_sort(): et = get_employee_table() et.sort(["lastname"]) assert et.column_data("dept") == [36, 33, 31, 34, 34, 33] def test_aggregate(): et = get_employee_table() et.sort(["dept"]) agg = et.aggregated(["dept"], "lastname", lambda lst: ",".join(lst)) assert len(agg) == 4 for dept, lastnames in agg: lastnames = lastnames.split(",") for lastname in lastnames: assert et.get(lastname=lastname).dept == dept def test_aggregate_2(): from pytools.datatable import DataTable tbl = DataTable(["step", "value"], list(zip(list(range(20)), list(range(20))))) agg = tbl.aggregated(["step"], "value", max) assert agg.column_data("step") == list(range(20)) assert agg.column_data("value") == list(range(20)) def test_join(): et = get_employee_table() dt = get_dept_table() et.sort(["dept"]) dt.sort(["id"]) inner_joined = et.join("dept", "id", dt) assert len(inner_joined) == len(et)-1 for dept, lastname, deptname in inner_joined: dept_id = et.get(lastname=lastname).dept assert dept_id == dept assert dt.get(id=dept_id).name == deptname outer_joined = et.join("dept", "id", dt, outer=True) assert len(outer_joined) == len(et)+1 pytools-2015.1.6/test/test_math_stuff.py0000644000175000017500000000102212610520312021246 0ustar andreasandreas00000000000000from __future__ import division from __future__ import absolute_import def test_variance(): data = [4, 7, 13, 16] def naive_var(data): n = len(data) return (( sum(di**2 for di in data) - sum(data)**2/n) /(n-1)) from pytools import variance orig_variance = variance(data, entire_pop=False) assert abs(naive_var(data) - orig_variance) < 1e-15 data = [1e9 + x for x in data] assert abs(variance(data, entire_pop=False) - orig_variance) < 1e-15 pytools-2015.1.6/test/test_persistent_dict.py0000644000175000017500000000216312610520312022320 0ustar andreasandreas00000000000000from __future__ import division, with_statement from __future__ import absolute_import import pytest # noqa import sys # noqa from six.moves import range from six.moves import zip def test_persistent_dict(): from pytools.persistent_dict import PersistentDict pdict = PersistentDict("pytools-test") pdict.clear() from random import randrange def rand_str(n=20): return "".join( chr(65+randrange(26)) for i in range(n)) keys = [(randrange(2000), rand_str(), None) for i in range(20)] values = [randrange(2000) for i in range(20)] d = dict(list(zip(keys, values))) for k, v in zip(keys, values): pdict[k] = v pdict.store(k, v, info_files={"hey": str(v)}) for k, v in list(d.items()): assert d[k] == pdict[k] for k, v in zip(keys, values): pdict.store(k, v+1, info_files={"hey": str(v)}) for k, v in list(d.items()): assert d[k] + 1 == pdict[k] if __name__ == "__main__": if len(sys.argv) > 1: exec(sys.argv[1]) else: from py.test.cmdline import main main([__file__]) pytools-2015.1.6/test/test_pytools.py0000644000175000017500000000543212610520312020630 0ustar andreasandreas00000000000000from __future__ import division, with_statement from __future__ import absolute_import import pytest import sys # noqa @pytest.mark.skipif("sys.version_info < (2, 5)") def test_memoize_method_clear(): from pytools import 
memoize_method class SomeClass: def __init__(self): self.run_count = 0 @memoize_method def f(self): self.run_count += 1 return 17 sc = SomeClass() sc.f() sc.f() assert sc.run_count == 1 sc.f.clear_cache(sc) def test_memoize_method_with_uncached(): from pytools import memoize_method_with_uncached class SomeClass: def __init__(self): self.run_count = 0 @memoize_method_with_uncached(uncached_args=[1], uncached_kwargs=["z"]) def f(self, x, y, z): self.run_count += 1 return 17 sc = SomeClass() sc.f(17, 18, z=19) sc.f(17, 19, z=20) assert sc.run_count == 1 sc.f(18, 19, z=20) assert sc.run_count == 2 sc.f.clear_cache(sc) def test_memoize_method_nested(): from pytools import memoize_method_nested class SomeClass: def __init__(self): self.run_count = 0 def f(self): @memoize_method_nested def inner(x): self.run_count += 1 return 2*x inner(5) inner(5) sc = SomeClass() sc.f() assert sc.run_count == 1 def test_p_convergence_verifier(): pytest.importorskip("numpy") from pytools.convergence import PConvergenceVerifier pconv_verifier = PConvergenceVerifier() for order in [2, 3, 4, 5]: pconv_verifier.add_data_point(order, 0.1**order) pconv_verifier() pconv_verifier = PConvergenceVerifier() for order in [2, 3, 4, 5]: pconv_verifier.add_data_point(order, 0.5**order) pconv_verifier() pconv_verifier = PConvergenceVerifier() for order in [2, 3, 4, 5]: pconv_verifier.add_data_point(order, 2) with pytest.raises(AssertionError): pconv_verifier() def test_memoize(): from pytools import memoize count = [0] @memoize(use_kwargs=True) def f(i, j=1): count[0] += 1 return i + j assert f(1) == 2 assert f(1, 2) == 3 assert f(2, j=3) == 5 assert count[0] == 3 assert f(1) == 2 assert f(1, 2) == 3 assert f(2, j=3) == 5 assert count[0] == 3 def test_memoize_keyfunc(): from pytools import memoize count = [0] @memoize(key=lambda i, j=(1,): (i, len(j))) def f(i, j=(1,)): count[0] += 1 return i + len(j) assert f(1) == 2 assert f(1, [2]) == 2 assert f(2, j=[2, 3]) == 4 assert count[0] == 2 assert f(1) == 2 assert f(1, (2,)) == 2 assert f(2, j=(2, 3)) == 4 assert count[0] == 2 pytools-2015.1.6/MANIFEST.in0000644000175000017500000000000012610520312016247 0ustar andreasandreas00000000000000pytools-2015.1.6/README0000644000175000017500000000025112610520312015401 0ustar andreasandreas00000000000000Miscellaneous Python lifesavers. Andreas Kloeckner Includes Michele Simionato's decorator module, from http://www.phyast.pitt.edu/~micheles/python/ pytools-2015.1.6/setup.py0000644000175000017500000000427212613227140016250 0ustar andreasandreas00000000000000#! /usr/bin/env python # -*- coding: latin1 -*- from setuptools import setup setup(name="pytools", version="2015.1.6", description="A collection of tools for Python", long_description=""" Pytools is a big bag of things that are "missing" from the Python standard library. This is mainly a dependency of my other software packages, and is probably of little interest to you unless you use those. If you're curious nonetheless, here's what's on offer: * A ton of small tool functions such as `len_iterable`, `argmin`, tuple generation, permutation generation, ASCII table pretty printing, GvR's mokeypatch_xxx() hack, the elusive `flatten`, and much more. * Michele Simionato's decorator module * A time-series logging module, `pytools.log`. * Batch job submission, `pytools.batchjob`. * A lexer, `pytools.lex`. 
""", classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Other Audience', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Scientific/Engineering :: Mathematics', 'Topic :: Scientific/Engineering :: Visualization', 'Topic :: Software Development :: Libraries', 'Topic :: Utilities', ], install_requires=[ "decorator>=3.2.0", "appdirs>=1.4.0", "six>=1.8.0", ], author="Andreas Kloeckner", url="http://pypi.python.org/pypi/pytools", author_email="inform@tiker.net", license="MIT", packages=["pytools"]) pytools-2015.1.6/PKG-INFO0000644000175000017500000000375112613227172015641 0ustar andreasandreas00000000000000Metadata-Version: 1.1 Name: pytools Version: 2015.1.6 Summary: A collection of tools for Python Home-page: http://pypi.python.org/pypi/pytools Author: Andreas Kloeckner Author-email: inform@tiker.net License: MIT Description: Pytools is a big bag of things that are "missing" from the Python standard library. This is mainly a dependency of my other software packages, and is probably of little interest to you unless you use those. If you're curious nonetheless, here's what's on offer: * A ton of small tool functions such as `len_iterable`, `argmin`, tuple generation, permutation generation, ASCII table pretty printing, GvR's mokeypatch_xxx() hack, the elusive `flatten`, and much more. * Michele Simionato's decorator module * A time-series logging module, `pytools.log`. * Batch job submission, `pytools.batchjob`. * A lexer, `pytools.lex`. Platform: UNKNOWN Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Other Audience Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: MIT License Classifier: Natural Language :: English Classifier: Programming Language :: Python Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.6 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Scientific/Engineering :: Information Analysis Classifier: Topic :: Scientific/Engineering :: Mathematics Classifier: Topic :: Scientific/Engineering :: Visualization Classifier: Topic :: Software Development :: Libraries Classifier: Topic :: Utilities pytools-2015.1.6/setup.cfg0000644000175000017500000000025412613227172016360 0ustar andreasandreas00000000000000[flake8] ignore = E126,E127,E128,E123,E226,E241,E242,E265,E402,W503 max-line-length = 85 [wheel] universal = 1 [egg_info] tag_build = tag_date = 0 tag_svn_revision = 0