pax_global_header00006660000000000000000000000064141600125710014507gustar00rootroot0000000000000052 comment=61fc6a9494199c539390a0776c43d22e127cc5a8 funcy-1.17/000077500000000000000000000000001416001257100125635ustar00rootroot00000000000000funcy-1.17/.github/000077500000000000000000000000001416001257100141235ustar00rootroot00000000000000funcy-1.17/.github/workflows/000077500000000000000000000000001416001257100161605ustar00rootroot00000000000000funcy-1.17/.github/workflows/test.yml000066400000000000000000000024001416001257100176560ustar00rootroot00000000000000name: CI on: push: branches: - master pull_request: jobs: lint: runs-on: ubuntu-18.04 steps: - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 with: python-version: 3.8 - name: Lint run: | pip install flake8 flake8 funcy flake8 --select=F,E5,W tests docs: runs-on: ubuntu-18.04 steps: - uses: actions/checkout@v2 - name: Set up Python 3.8 uses: actions/setup-python@v2 with: python-version: 3.8 - name: Build docs working-directory: ./docs run: | pip install -r requirements.txt sphinx-build -b html -W . 
_build/html test: runs-on: ubuntu-18.04 strategy: fail-fast: false matrix: python-version: ["2.7", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "3.10", "pypy2", "pypy3"] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip install -r test_requirements.txt - name: Run tests run: pytest -W error funcy-1.17/.gitignore000066400000000000000000000001331416001257100145500ustar00rootroot00000000000000*.pyc dist *.egg-info build docs/_build .tags* .tox .coverage htmlcov .cache .pytest_cache funcy-1.17/.readthedocs.yaml000066400000000000000000000002501416001257100160070ustar00rootroot00000000000000version: 2 build: os: "ubuntu-20.04" tools: python: "3.8" sphinx: configuration: docs/conf.py python: install: - requirements: docs/requirements.txt funcy-1.17/CHANGELOG000066400000000000000000000267621416001257100140120ustar00rootroot000000000000001.17 - added del_in() - made throttle() and limit_error_rate() work on methods - added str and repr to Call objects - migrated CI to Github actions (thx to Bruno Alla) - fixed doc[string] for zip_dicts (Tal Einat) - fixed some inspect issues - minor doc fixes 1.16 - support Python 3.9 officially - unify @memoize() and @cache(): both have .skip/.memory/.invalidate/.invalidate_all now - support dynamic resulting exception in @reraise() (Laurens Duijvesteijn) - made () optional for @decorator-made decorators with kw-only args - added @throttle() - added has_path() (Denys Zorinets) - fixed autocurry kwargs handling 1.15 - made rpartial accept keyworded arguments (Ruan Comelli) - made `@cache.invalidate()` idempotent (Dmitry Vasilyanov) - made raiser() accept a string as a shortcut - fixed cheatsheat description for 'distinct' helper (tsouvarev) - fixed some seqs docstrings - fixed some typos (Tim Gates) 1.14 - stated Python 3.7 and 3.8 support - dropped 
Python 2.6 - added @wrap_prop() - added filter_errors param to @retry() - published nullcontext properly 1.13 - added @wrap_with() - added nullcontext 1.12 - added @cached_readonly - more introspection in @decorator decorators - documented @cached_property inheritance limitations - included tests in pypi sdist tarball (Tomáš Chvátal) 1.11 - switched docs and internals to Python 3 - improved docs: better texts and examples here and there - support Python 3.7 officially - added popups over functions everywhere in docs - accept any iterables of errors in flow utils - fixed walk_values() for defaultdicts with empty factory - fixed xmap() signature introspection - documented lzip() 1.10.3 - added repr_len param to various debug utils - dropped testing in Python 3.3 1.10.2 - support extended function semantics in iffy (Eric Prykhodko) - distribute as a universal wheel. 1.10.1 - use raise from in reraise() - fix @cache with mixed positional and keywords args (thx to adrian-dankiv) 1.10 - added @reraise() - added unit and threshold params to *_durations() utils - published and documented LazyObject - fixed iffy() default argument when action is not present (Dmytro Kabakchei) 1.9.1 - make where() skip nonexistent keys (Aleksei Voronov) - fixed r?curry() on funcy i?map(), i?mapcat() and merge_with() 1.9 - filled in docstrings and some names - better currying: - all *curry() now work with builtins and classes - autocurry() is robust in deciding when to call - deprecated autocurry() n arg - @memoize now exposes its memory and accepts key_func arg - @cache also accepts key_func and supports funcs with **kwargs - added omit() (Petr Melnikov) - fixed/hacked PyCharm import introspection - optimized i?reductions() in Python 3 - backported accumulate() to Python 2 1.8 - added count_reps() - published namespace class - added LazyObject (simplistic, experimental and not documented) - support class dicts in walk*(), select*(), compact(), project() and empty() - support Python 3 
dict.keys(), .values() and .items() in walk*() and friends - fixed empty() on iterators - optimized chunking range() in Python 3 1.7.5 - fixed defaults in double @decorated function - fixed @decorator with more than one default 1.7.4 - better error message on call.missed_arg access - optimized call.arg access in @decorator 1.7.3 - support Python 3.6 officially - fix deprecation warnings in Python 3.5 and 3.6 1.7.2 - added cheatsheet - many fixes in docs - documented @post_processing() - fixed (print|log)_* on non-function callables 1.7.1 - fixed 3+ argument map() in Python 3 1.7 - support Python 3.5 officially - added group_values() - fixed i?partition_by() for non-boolean extended mapper - cleanups and optimizations in colls and seqs 1.6 - added i?tree_nodes() - added (log|print)_iter_durations() to debug utils - added lists support to get_in(), set_in() and update_in() - single argument takewhile() and dropwhile() - published iwhere(), ipluck(), ipluck_attr() and iinvoke() - support @retry() with list (not tuple) of errors (Zakhar Zibarov) - changed µs to mks in time messages - optimized update_in() 1.5 - added rcompose() - added i?tree_leaves() - added pluck_attr() (Marcus McCurdy) - added set_in() and update_in() - added get_in() (Swaroop) - fixed bug with flatten() follow not passed deep 1.4 - added rpartial() and rcurry() - support arguments in print_(calls|exits) - made print_(errors|durations) work both with and without arguments - made (log|print)_errors() work as context manager - made (log|print)_durations() work as context managers - pass func docstring to @cached_property 1.3 - added with_next() - added timeout argument to @retry() (rocco66) - support kwargs in @memoize'd functions (Lukasz Dobrzanski) - do not cut result repr in @(log|print)_calls() and @(log|print)_exits 1.2 - support pypy3 - added @contextmanager, ContextDecorator - added @(log|print)_(enters|exits) - print stack trace in @(log|print)_(calls|errors) - added label argument for tap() - 
better formatted call signatures in debug utilities - added itervalues() - exposed empty(), iteritems() - exposed @wraps and unwrap() - slightly optimized last() and nth() - fixed signatures of functions wrapped with @wrap_(mapper|selector) 1.1 - added merge_with() and join_with() - added @once, @once_per_args and @once_per() - added suppress() context manager - added is_set() - added name argument to @monkey - decorators created with @decorator now set __original__ attribute - optimized @decorator - optimized nth() and last() - lzip() is now exported by default from/for py3 Backward incompatible fixes: - made pluck(), where() and invoke() return interators in python 3 - __wrapped__ attribute added by decorators now correctly refers to immediate wrapped not innermost 1.0.0 - @silent, @ignore() and decorators created with @decorator will now work with method_descriptors and other non-wrappable callables. - chained decorators now have access to arguments by name - exposed cut_prefix() and cut_suffix() - optimized re_tester() - fixed @retry in python 3 Backward incompatible changes: - function made from dict will now use __getitem__ instead of get. Means possible KeyErrors for dicts and factory function calls for defaultdict. Use `a_dict.get` instead of just `a_dict` for old behaviour. - reverted imap(None, seq) to default strange behaviour. 
0.10.1 - optimized @decorator 0.10 - added is_tuple() - raiser() can now be called without arguments, defaults to Exception - support del @cached_property - optimized and cleaned up @cached_property - optimized i?split(), split_at() and split_by() - optimized @memoize - optimized zipdict() Backward incompatible changes: - split(), split_at() and split_by() now return a tuple of two lists instead of list of them - @cached_property no longer uses _name to store cached value - partial() is now an alias to functools.partial, use func_partial() for old behaviour 0.9 - added experimental python 3 support - added python 2.6 support - added autocurry() - published idistinct(), isplit(), isplit_at(), isplit_by() - some optimizations 0.8 - added raiser() - added idistinct() - added key argument to i?distinct() - added key argument to is_distinct() - added group_by_keys() Backward incompatible changes: - walk_values() now updates defaultdict item factory to composition of mapper and old one - izip_dicts() now packs values in tuple separate from key - @decorator raises AttributeError not NameError when non-existent argument is accessed by name 0.7 - added i?flatten() - added pairwise() - added nth() - added is_seqcont() - greatly optimized @decorator - added @log_durations and @print_durations - @logs_calls and @print_calls now provide call signature on return - @logs_calls and @print_calls now log errors, optional for @log_calls - better call signature stringification for @(log|print)_(calls|errors) - fixed i?partition() and i?chunks() with xrange() Backward incompatible changes: - is_iter() now returns False given xrange() object 0.6.0 - added izip_values() and izip_dicts() - added last() and butlast() - added isnone() and notnone() primitives - added extended fn semantics to group_by(), count_by() and i?partition_by() - added fill argument to with_prev() - optimized ilen() 0.5.6 - fixed installation issue 0.5.5 - added count_by() - added i?partition_by() 0.5.4 - added 
@post_processing() flow utility - partition() and chunks() can handle iterators now - added ipartition() and ichunks() 0.5.3 - fixed decorators produced with @decorator over non-functions - optimized @ignore and @silent 0.5.2 - added i?without() - more and better docs Backward incompatible changes: - compact() now strips all falsy values not just None 0.5.1 - added ints and slices to extended fn semantics - added extended semantics to *_fn(), compose(), complement and i?juxt() - can now @monkey() patch modules - cached properties can now be set 0.5.0 - added type testing utilities - added @monkey - added cut_prefix() and cut_suffix() privately - added @silent_lookuper - exported @retry directly from from funcy - better support for arg introspection in @decorator Backward incompatible changes: - removed defaults for log_calls() and log_errors() - @make_lookuper decorated functions now will raise LookupError on memory miss, use @silent_lookuper for old behavior - call object in @decorator access to func, args and kwargs is now done through _func, _args and _kwargs 0.4.1 - decorators created with @decorator are now able to pass additional args and kwargs - @collecting, @joining() and @limit_error_rate() now exported directly from funcy - @tap(), @log_calls and @log_errors() now exported directly from funcy - added @print_calls and @print_errors - better handling passing None to optional parameter - docs for debugging utilities Backward incompatible changes: - @log renamed to @log_calls 0.4.0 - extended predicate/mapping semantics for seq and coll utils - added str_join() - added @collecting and @joining() - added sums() and isums() - better docs 0.3.4 - added with_prev() - added iterable() - support iterators in walk*(), select*(), empty() and project() - reexport itertools.chain() - faster curry - more docs 0.3.3 - added compact(), i?reductions() - added default argument to @ignore() - added tap() experimental debug utility - @make_lookuper() now works on functions 
with arguments - exposed ilen() publicly - added default argument to @ignore() - fix: join() and merge() now correctly fail when receive [None, ...] - better docs Backward incompatible changes: - renamed @memoize.lookup() to @make_lookuper() 0.3.2 - added ilen() - added some object helpers: namespace base class and @cached_property - more docs 0.3.1 - added @memoize.lookup() - more and better docs Backward incompatible changes: - removed generator based @decorator version - pluck() now accepts key as first parameter 0.3.0 - partial docs - added where(), pluck() and invoke() inspired by underscore - added split_by() - second() made public - reexport itertools.cycle() - walk() and select() work with strings now Backward incompatible changes: - renamed groupby() to group_by() - separated split_at() from split() - automatically unpack one-element tuples returned from re_*() - join() now returns None on empty input instead of TypeError - made fallback() accept multiple arguments Bugfixes: - fixed join() swallowing first coll from iterator of colls 0.2.1 - one argument keep() - fallback() flow 0.2 - added curry() to funcs - added re_test(), re_tester() and re_finder() to strings - added second() to seqs - added one() and one_fn() to colls and funcolls - support defaultdicts in walk*(), select*(), project(), empty() - one argument and uncallable default in iffy() funcy-1.17/LICENSE000066400000000000000000000027721416001257100136000ustar00rootroot00000000000000Copyright (c) 2012-2020, Alexander Schepanovski. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. 
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of funcy nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. funcy-1.17/MANIFEST.in000066400000000000000000000001371416001257100143220ustar00rootroot00000000000000include LICENSE include CHANGELOG include README.rst include VERSION recursive-include tests * funcy-1.17/README.rst000066400000000000000000000111431416001257100142520ustar00rootroot00000000000000Funcy |Build Status| ===== A collection of fancy functional tools focused on practicality. Inspired by clojure, underscore and my own abstractions. Keep reading to get an overview or `read the docs `_. Or jump directly to `cheatsheet `_. Works with Python 2.7, 3.4+ and pypy. Installation ------------- :: pip install funcy Overview -------------- Import stuff from funcy to make things happen: .. code:: python from funcy import whatever, you, need Merge collections of same type (works for dicts, sets, lists, tuples, iterators and even strings): .. 
code:: python merge(coll1, coll2, coll3, ...) join(colls) merge_with(sum, dict1, dict2, ...) Walk through collection, creating its transform (like map but preserves type): .. code:: python walk(str.upper, {'a', 'b'}) # {'A', 'B'} walk(reversed, {'a': 1, 'b': 2}) # {1: 'a', 2: 'b'} walk_keys(double, {'a': 1, 'b': 2}) # {'aa': 1, 'bb': 2} walk_values(inc, {'a': 1, 'b': 2}) # {'a': 2, 'b': 3} Select a part of collection: .. code:: python select(even, {1,2,3,10,20}) # {2,10,20} select(r'^a', ('a','b','ab','ba')) # ('a','ab') select_keys(callable, {str: '', None: None}) # {str: ''} compact({2, None, 1, 0}) # {1,2} Manipulate sequences: .. code:: python take(4, iterate(double, 1)) # [1, 2, 4, 8] first(drop(3, count(10))) # 13 lremove(even, [1, 2, 3]) # [1, 3] lconcat([1, 2], [5, 6]) # [1, 2, 5, 6] lcat(map(range, range(4))) # [0, 0, 1, 0, 1, 2] lmapcat(range, range(4)) # same flatten(nested_structure) # flat iter distinct('abacbdd') # iter('abcd') lsplit(odd, range(5)) # ([1, 3], [0, 2, 4]) lsplit_at(2, range(5)) # ([0, 1], [2, 3, 4]) group_by(mod3, range(5)) # {0: [0, 3], 1: [1, 4], 2: [2]} lpartition(2, range(5)) # [[0, 1], [2, 3]] chunks(2, range(5)) # iter: [0, 1], [2, 3], [4] pairwise(range(5)) # iter: [0, 1], [1, 2], ... And functions: .. code:: python partial(add, 1) # inc curry(add)(1)(2) # 3 compose(inc, double)(10) # 21 complement(even) # odd all_fn(isa(int), even) # is_even_int one_third = rpartial(operator.div, 3.0) has_suffix = rcurry(str.endswith, 2) Create decorators easily: .. code:: python @decorator def log(call): print call._func.__name__, call._args return call() Abstract control flow: .. code:: python walk_values(silent(int), {'a': '1', 'b': 'no'}) # => {'a': 1, 'b': None} @once def initialize(): "..." with suppress(OSError): os.remove('some.file') @ignore(ErrorRateExceeded) @limit_error_rate(fails=5, timeout=60) @retry(tries=2, errors=(HttpError, ServiceDown)) def some_unreliable_action(...): "..." 
class MyUser(AbstractBaseUser): @cached_property def public_phones(self): return self.phones.filter(public=True) Ease debugging: .. code:: python squares = {tap(x, 'x'): tap(x * x, 'x^2') for x in [3, 4]} # x: 3 # x^2: 9 # ... @print_exits def some_func(...): "..." @log_calls(log.info, errors=False) @log_errors(log.exception) def some_suspicious_function(...): "..." with print_durations('Creating models'): Model.objects.create(...) # ... # 10.2 ms in Creating models And `much more `_. Dive in ------- Funcy is an embodiment of ideas I explain in several essays: - `Why Every Language Needs Its Underscore `_ - `Functional Python Made Easy `_ - `Abstracting Control Flow `_ - `Painless Decorators `_ Running tests -------------- To run the tests using your default python: :: pip install -r test_requirements.txt py.test To fully run ``tox`` you need all the supported pythons to be installed. These are 2.6+, 3.3+, PyPy and PyPy3. You can run it for particular environment even in absense of all of the above:: tox -e py27 tox -e py36 tox -e lint .. |Build Status| image:: https://github.com/Suor/funcy/actions/workflows/test.yml/badge.svg :target: https://github.com/Suor/funcy/actions/workflows/test.yml?query=branch%3Amaster funcy-1.17/TODO.rst000066400000000000000000000012721416001257100140640ustar00rootroot00000000000000TODO ==== - public xfunc/xfn, xpred - where_not? - invalidate/invalidate_all() to (make|silent)_lookuper? - decorators with optional arguments? Or not TODO ----------- - pre_walk, post_walk - tree-seq - (log|print)_errors to optionally hide causing call - log_* and print_* to optionally hide args - padding to chunks - partial.func interface or (func, arg1, arg2) extended fns - reject*(), disjoint*() collections - zip_with = map(f, zip(seqs)) - starfilter() - one argument select*()? other name? - reversed() to work with iterators - vector chained boolean test (like perl 6 [<]) Unknown future -------------- - cython implementation? separate - cyfuncy? 
fallback transparently? - funcyx? funcy-1.17/VERSION000066400000000000000000000000051416001257100136260ustar00rootroot000000000000001.17 funcy-1.17/docs/000077500000000000000000000000001416001257100135135ustar00rootroot00000000000000funcy-1.17/docs/Makefile000066400000000000000000000131251416001257100151550ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation 
(if enabled)" clean: -rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo @echo "Build finished. The coverage pages are in $(BUILDDIR)/coverage." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/funcy.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/funcy.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/funcy" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/funcy" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 
@echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." 
funcy-1.17/docs/_static/000077500000000000000000000000001416001257100151415ustar00rootroot00000000000000funcy-1.17/docs/_static/nhpup_1.1.js000066400000000000000000000151061416001257100172130ustar00rootroot00000000000000/* -------------------------------------------------------------------------- Code for link-hover text boxes By Nicolas Höning Usage: a link The configuration dict with CSS class and width is optional - default is class .pup and width of 200px. You can style the popup box via CSS, targeting its ID #pup. You can escape " in the popup text with ". Tutorial and support at http://nicolashoening.de?twocents&nr=8 -------------------------------------------------------------------------- The MIT License (MIT) Copyright (c) 2014 Nicolas Höning Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ nhpup = { pup: null, // This is the popup box, represented by a div identifier: "pup", // Name of ID and class of the popup box minMargin: 15, // Set how much minimal space there should be (in pixels) // between the popup and everything else (borders, mouse) default_width: 200, // Will be set to width from css in document.ready move: false, // Move it around with the mouse? we are only ready for that when the mouse event is set up. // Besides, having this turned off initially is resource-friendly. /* Write message, show popup w/ custom width if necessary, make sure it disappears on mouseout */ popup: function(p_msg, p_config) { return function(e) { // do track mouse moves and update position this.move = true; // restore defaults this.pup.removeClass() .addClass(this.identifier) .width(this.default_width); // custom configuration if (typeof p_config != 'undefined') { if ('class' in p_config) { this.pup.addClass(p_config['class']); } if ('width' in p_config) { this.pup.width(p_config['width']); } } if (nhpup.hiding) { clearTimeout(nhpup.hiding); nhpup.hiding = null; } // Write content and display this.pup.html(p_msg).show(); // Make sure popup goes away on mouse out and we stop the constant // positioning on mouse moves. // The event obj needs to be gotten from the virtual // caller, since we use onmouseover='nhpup.popup(p_msg)' var t = this.getTarget(e); $jq(t).unbind('mouseout').bind('mouseout', function(e){ nhpup.move = false; if (nhpup.hiding) clearTimeout(nhpup.hiding); nhpup.hiding = setTimeout(function () { nhpup.pup.hide(); }, 50) } ); }.bind(this); }, // set the target element position setElementPos: function(x, y) { // Call nudge to avoid edge overflow. 
Important tweak: x+10, because if // the popup is where the mouse is, the hoverOver/hoverOut events flicker var x_y = this.nudge(x + 10, y); // remember: the popup is still hidden this.pup.css('top', x_y[1] + 'px') .css('left', x_y[0] + 'px'); }, /* Avoid edge overflow */ nudge: function(x,y) { var win = $jq(window); // When the mouse is too far on the right, put window to the left var xtreme = $jq(document).scrollLeft() + win.width() - this.pup.width() - this.minMargin; if(x > xtreme) { x -= this.pup.width() + 2 * this.minMargin; } x = this.max(x, 0); // When the mouse is too far down, move window up if((y + this.pup.height()) > (win.height() + $jq(document).scrollTop())) { y -= this.pup.height() + this.minMargin; } return [ x, y ]; }, /* custom max */ max: function(a,b) { if (a>b) return a; else return b; }, /* Get the target (element) of an event. Inspired by quirksmode */ getTarget: function(e) { var targ; if (!e) var e = window.event; if (e.target) targ = e.target; else if (e.srcElement) targ = e.srcElement; if (targ.nodeType == 3) // defeat Safari bug targ = targ.parentNode; return targ; }, onTouchDevice: function() { var deviceAgent = navigator.userAgent.toLowerCase(); return deviceAgent.match(/(iphone|ipod|ipad|android|blackberry|iemobile|opera m(ob|in)i|vodafone)/) !== null; }, initialized: false, initialize : function(){ if (this.initialized) return; window.$jq = jQuery; // this is safe in WP installations with noConflict mode (which is default) /* Prepare popup and define the mouseover callback */ jQuery(document).ready(function () { // create default popup on the page $jq('body').append(''); nhpup.pup = $jq('#' + nhpup.identifier); // set dynamic coords when the mouse moves $jq(document).mousemove(function (e) { if (!nhpup.onTouchDevice()) { // turn off constant repositioning for touch devices (no use for this anyway) if (nhpup.move) { nhpup.setElementPos(e.pageX, e.pageY); } } }); }); this.initialized = true; } }; if ('jQuery' in window) 
nhpup.initialize(); funcy-1.17/docs/_static/overrides.css000066400000000000000000000014531416001257100176600ustar00rootroot00000000000000@import "css/theme.css"; .rst-content dl:not(.docutils) dt { float: left; margin: 2px 2px 1px 0px !important; } .rst-content dl:not(.docutils) dt + dd { padding-top: 6px; } .rst-content dl dd { clear: both; } .rst-content div[class^='highlight'] pre { padding: 7px 7px 4px 7px; font-size: 14px; line-height: 140%; } code.literal .pre { font-size: 13px; } .rst-content .external code { color: #404040; } /* Tables with functions */ .rst-content table.docutils td {vertical-align: top; padding: 6px 16px; line-height: 20px} .wy-table-responsive table td {white-space: normal} .wy-table-responsive table td:first-child {white-space: nowrap} .rst-content code.xref {color: #2E7FB3; font-size: 90%; padding: 0px 2px; border: none; background: none; line-height: 20px} funcy-1.17/docs/calc.rst000066400000000000000000000055171416001257100151570ustar00rootroot00000000000000Calculation =========== .. decorator:: memoize(key_func=None) Memoizes decorated function results, trading memory for performance. Can skip memoization for failed calculation attempts:: @memoize # Omitting parentheses is ok def ip_to_city(ip): try: return request_city_from_slow_service(ip) except NotFound: return None # return None and memoize it except Timeout: raise memoize.skip(CITY) # return CITY, but don't memoize it Additionally ``@memoize`` exposes its memory for you to manipulate:: # Prefill memory ip_to_city.memory.update({...}) # Forget everything ip_to_city.memory.clear() Custom `key_func` could be used to work with unhashable objects, insignificant arguments, etc:: @memoize(key_func=lambda obj, verbose=None: obj.key) def do_things(obj, verbose=False): # ... .. decorator:: make_lookuper As :func:`@memoize`, but with prefilled memory. Decorated function should return all available arg-value pairs, which should be a dict or a sequence of pairs. 
Resulting function will raise ``LookupError`` for any argument missing in it:: @make_lookuper def city_location(): return {row['city']: row['location'] for row in fetch_city_locations()} If decorated function has arguments then separate lookuper with its own lookup table is created for each combination of arguments. This can be used to make lookup tables on demand:: @make_lookuper def function_lookup(f): return {x: f(x) for x in range(100)} fast_sin = function_lookup(math.sin) fast_cos = function_lookup(math.cos) Or load some resources, memoize them and use as a function:: @make_lookuper def translate(lang): return make_list_of_pairs(load_translation_file(lang)) russian_phrases = lmap(translate('ru'), english_phrases) .. decorator:: silent_lookuper Same as :func:`@make_lookuper`, but returns ``None`` on memory miss. .. decorator:: cache(timeout, key_func=None) Caches decorated function results for ``timeout``. It can be either number of seconds or :class:`py3:datetime.timedelta`:: @cache(60 * 60) def api_call(query): # ... Cache can be invalidated before timeout with:: api_call.invalidate(query) # Forget cache for query api_call.invalidate_all() # Forget everything Custom ``key_func`` could be used same way as in :func:`@memoize`:: # Do not use token in cache key @cache(60 * 60, key_func=lambda query, token=None: query) def api_call(query, token=None): # ... .. raw:: html :file: descriptions.html funcy-1.17/docs/cheatsheet.rst000066400000000000000000000106211416001257100163620ustar00rootroot00000000000000.. _cheatsheet: Cheatsheet ========== Hover over function to get its description. Click to jump to docs. 
Sequences --------- ========== ============================================================== Create :func:`count` :func:`cycle` :func:`repeat` :func:`repeatedly` :func:`iterate` :func:`re_all` :func:`re_iter` Access :func:`first` :func:`second` :func:`last` :func:`nth` :func:`some` :func:`take` Slice :func:`take` :func:`drop` :func:`rest` :func:`butlast` :func:`takewhile` :func:`dropwhile` :func:`split_at` :func:`split_by` Transform :func:`map` :func:`mapcat` :func:`keep` :func:`pluck` :func:`pluck_attr` :func:`invoke` Filter :func:`filter` :func:`remove` :func:`keep` :func:`distinct` :func:`where` :func:`without` Join :func:`cat` :func:`concat` :func:`flatten` :func:`mapcat` :func:`interleave` :func:`interpose` Partition :func:`chunks` :func:`partition` :func:`partition_by` :func:`split_at` :func:`split_by` Group :func:`split` :func:`count_by` :func:`count_reps` :func:`group_by` :func:`group_by_keys` :func:`group_values` Aggregate :func:`ilen` :func:`reductions` :func:`sums` :func:`all` :func:`any` :func:`none` :func:`one` :func:`count_by` :func:`count_reps` Iterate :func:`pairwise` :func:`with_prev` :func:`with_next` :func:`zip_values` :func:`zip_dicts` :func:`tree_leaves` :func:`tree_nodes` ========== ============================================================== .. _colls: Collections ----------- ===================== ============================================================== Join :func:`merge` :func:`merge_with` :func:`join` :func:`join_with` Transform :func:`walk` :func:`walk_keys` :func:`walk_values` Filter :func:`select` :func:`select_keys` :func:`select_values` :func:`compact` Dicts :ref:`*` :func:`flip` :func:`zipdict` :func:`pluck` :func:`where` :func:`itervalues` :func:`iteritems` :func:`zip_values` :func:`zip_dicts` :func:`project` :func:`omit` Misc :func:`empty` :func:`get_in` :func:`set_in` :func:`update_in` :func:`del_in` :func:`has_path` ===================== ============================================================== Functions --------- .. 
:ref:`*` ========== ============================================================== Create :func:`identity` :func:`constantly` :func:`func_partial` :func:`partial` :func:`rpartial` :func:`iffy` :func:`caller` :func:`re_finder` :func:`re_tester` Transform :func:`complement` :func:`iffy` :func:`autocurry` :func:`curry` :func:`rcurry` Combine :func:`compose` :func:`rcompose` :func:`juxt` :func:`all_fn` :func:`any_fn` :func:`none_fn` :func:`one_fn` :func:`some_fn` ========== ============================================================== Other topics ------------ ================== ============================================================== Content tests :func:`all` :func:`any` :func:`none` :func:`one` :func:`is_distinct` Type tests :func:`isa` :func:`is_iter` :func:`is_list` :func:`is_tuple` :func:`is_set` :func:`is_mapping` :func:`is_seq` :func:`is_seqcoll` :func:`is_seqcont` :func:`iterable` Decorators :func:`decorator` :func:`wraps` :func:`unwrap` :func:`autocurry` Control flow :func:`once` :func:`once_per` :func:`once_per_args` :func:`collecting` :func:`joining` :func:`post_processing` :func:`throttle` :func:`wrap_with` Error handling :func:`retry` :func:`silent` :func:`ignore` :func:`suppress` :func:`limit_error_rate` :func:`fallback` :func:`raiser` :func:`reraise` Debugging :func:`tap` :func:`log_calls` :func:`log_enters` :func:`log_exits` :func:`log_errors` :func:`log_durations` :func:`log_iter_durations` Caching :func:`memoize` :func:`cache` :func:`cached_property` :func:`cached_readonly` :func:`make_lookuper` :func:`silent_lookuper` Regexes :func:`re_find` :func:`re_test` :func:`re_all` :func:`re_iter` :func:`re_finder` :func:`re_tester` Strings :func:`cut_prefix` :func:`cut_suffix` :func:`str_join` Objects :func:`cached_property` :func:`cached_readonly` :func:`wrap_prop` :func:`monkey` :func:`invoke` :func:`pluck_attr` :class:`namespace` :class:`LazyObject` Primitives :func:`isnone` :func:`notnone` :func:`inc` :func:`dec` :func:`even` :func:`odd` 
================== ============================================================== .. raw:: html :file: descriptions.html funcy-1.17/docs/colls.rst000066400000000000000000000273571416001257100153770ustar00rootroot00000000000000Collections =========== Unite ----- .. function:: merge(*colls) Merges several collections of same type into one: dicts, sets, lists, tuples, iterators or strings. For dicts values of later dicts override values of former ones with same keys. Can be used in variety of ways, but merging dicts is probably most common:: def utility(**options): defaults = {...} options = merge(defaults, options) ... If you merge sequences and don't need to preserve collection type, then use :func:`concat` or :func:`lconcat` instead. .. function:: join(colls) Joins collections of same type into one. Same as :func:`merge`, but accepts iterable of collections. Use :func:`cat` and :func:`lcat` for non-type preserving sequence join. Transform and select -------------------- All functions in this section support :ref:`extended_fns`. .. function:: walk(f, coll) Returns a collection of same type as ``coll`` consisting of its elements mapped with the given function:: walk(inc, {1, 2, 3}) # -> {2, 3, 4} walk(inc, (1, 2, 3)) # -> (2, 3, 4) When walking dict, ``(key, value)`` pairs are mapped, i.e. this lines :func:`flip` dict:: swap = lambda (k, v): (v, k) walk(swap, {1: 10, 2: 20}) :func:`walk` works with strings too:: walk(lambda x: x * 2, 'ABC') # -> 'AABBCC' walk(compose(str, ord), 'ABC') # -> '656667' One should use :func:`map` when there is no need to preserve collection type. .. note about constructor interface? .. function:: walk_keys(f, coll) Walks keys of ``coll``, mapping them with the given function. 
Works with mappings and collections of pairs:: walk_keys(str.upper, {'a': 1, 'b': 2}) # {'A': 1, 'B': 2} walk_keys(int, json.loads(some_dict)) # restore key type lost in translation Important to note that it preserves collection type whenever this is simple :class:`py3:dict`, :class:`~py3:collections.defaultdict`, :class:`~py3:collections.OrderedDict` or any other mapping class or a collection of pairs. .. function:: walk_values(f, coll) Walks values of ``coll``, mapping them with the given function. Works with mappings and collections of pairs. Common use is to process values somehow:: clean_values = walk_values(int, form_values) sorted_groups = walk_values(sorted, groups) Hint: you can use :func:`partial(sorted, key=...) ` instead of :func:`py3:sorted` to sort in non-default way. Note that ``walk_values()`` has special handling for :class:`defaultdicts `. It constructs new one with values mapped the same as for ordinary dict, but a default factory of new ``defaultdict`` would be a composition of ``f`` and old default factory:: d = defaultdict(lambda: 'default', a='hi', b='bye') walk_values(str.upper, d) # -> defaultdict(lambda: 'DEFAULT', a='HI', b='BYE') .. function:: select(pred, coll) Filters elements of ``coll`` by ``pred`` constructing a collection of same type. When filtering a dict ``pred`` receives ``(key, value)`` pairs. See :func:`select_keys` and :func:`select_values` to filter it by keys or values respectively:: select(even, {1, 2, 3, 10, 20}) # -> {2, 10, 20} select(lambda (k, v): k == v, {1: 1, 2: 3}) # -> {1: 1} .. function:: select_keys(pred, coll) Select part of a dict or a collection of pairs with keys passing the given predicate. This way a public part of instance attributes dictionary could be selected:: is_public = complement(re_tester('^_')) public = select_keys(is_public, instance.__dict__) .. 
function:: select_values(pred, coll) Select part of a dict or a collection of pairs with values passing the given predicate:: # Leave only str values select_values(isa(str), values) # Construct a dict of methods select_values(inspect.isfunction, cls.__dict__) .. function:: compact(coll) Removes falsy values from given collection. When compacting a dict all keys with falsy values are removed. Extract integer data from request:: compact(walk_values(silent(int), request_dict)) Dict utils ---------- .. function:: merge_with(f, *dicts) join_with(f, dicts) Merge several dicts combining values for same key with given function:: merge_with(list, {1: 1}, {1: 10, 2: 2}) # -> {1: [1, 10], 2: [2]} merge_with(sum, {1: 1}, {1: 10, 2: 2}) # -> {1: 11, 2: 2} join_with(first, ({n % 3: n} for n in range(100, 110))) # -> {0: 102, 1: 100, 2: 101} .. function:: zipdict(keys, vals) Returns a dict with the ``keys`` mapped to the corresponding ``vals``. Stops pairing on shorter sequence end:: zipdict('abcd', range(4)) # -> {'a': 0, 'b': 1, 'c': 2, 'd': 3} zipdict('abc', count()) # -> {'a': 0, 'b': 1, 'c': 2} .. function:: flip(mapping) Flip passed dict swapping its keys and values. Also works for sequences of pairs. Preserves collection type:: flip(OrderedDict(['aA', 'bB'])) # -> OrderedDict([('A', 'a'), ('B', 'b')]) .. function:: project(mapping, keys) Returns a dict containing only those entries in ``mapping`` whose key is in ``keys``. Most useful to shrink some common data or options to predefined subset. One particular case is constructing a dict of used variables:: merge(project(__builtins__, names), project(globals(), names)) .. function:: omit(mapping, keys) Returns a copy of ``mapping`` with ``keys`` omitted. Preserves collection type:: omit({'a': 1, 'b': 2, 'c': 3}, 'ac') # -> {'b': 2} .. function:: zip_values(*dicts) Yields tuples of corresponding values of given dicts. Skips any keys not present in all of the dicts. 
Comes in handy when comparing two or more dicts:: error = sum((x - y) ** 2 for x, y in zip_values(result, reference)) .. function:: zip_dicts(*dicts) Yields tuples like ``key, (value1, value2, ...)`` for each common key of all given dicts. A neat way to process several dicts at once:: changed_items = [id for id, (new, old) in zip_dicts(items, old_items) if abs(new - old) >= PRECISION] lines = {id: cnt * price for id, (cnt, price) in zip_dicts(amounts, prices)} See also :func:`zip_values`. .. function:: get_in(coll, path, default=None) Returns a value corresponding to ``path`` in nested collection:: get_in({"a": {"b": 42}}, ["a", "b"]) # -> 42 get_in({"a": {"b": 42}}, ["c"], "foo") # -> "foo" Note that missing key or index, i.e. `KeyError` and `IndexError` result into `default` being return, while trying to use non-int index for a list will result into `TypeError`. This way funcy stays strict on types. .. function:: set_in(coll, path, value) Creates a nested collection with the ``value`` set at specified ``path``. Original collection is not changed:: set_in({"a": {"b": 42}}, ["a", "b"], 10) # -> {"a": {"b": 10}} set_in({"a": {"b": 42}}, ["a", "c"], 10) # -> {"a": {"b": 42, "c": 10}} .. function:: update_in(coll, path, update, default=None) Creates a nested collection with a value at specified ``path`` updated:: update_in({"a": {}}, ["a", "cnt"], inc, default=0) # -> {"a": {"cnt": 1}} .. function:: del_in(coll, path) Creates a nested collection with ``path`` removed:: del_in({"a": [1, 2, 3]}, ["a", 1]) # -> {"a": [1, 3]} Returns the collection as is if the path is missing. .. function:: has_path(coll, path) Checks if path exists in the given nested collection:: has_path({"a": {"b": 42}}, ["a", "b"]) # -> True has_path({"a": {"b": 42}}, ["c"]) # -> False has_path({"a": [1, 2]}, ["a", 0]) # -> True Data manipulation ----------------- .. 
function:: where(mappings, **cond) lwhere(mappings, **cond) Looks through each value in given sequence of dicts and returns an iterator or a list of all the dicts that contain all key-value pairs in ``cond``:: lwhere(plays, author="Shakespeare", year=1611) # => [{"title": "Cymbeline", "author": "Shakespeare", "year": 1611}, # {"title": "The Tempest", "author": "Shakespeare", "year": 1611}] Iterator version could be used for efficiency or when you don't need the whole list. E.g. you are looking for the first match:: first(where(plays, author="Shakespeare")) # => {"title": "The Two Gentlemen of Verona", ...} .. function:: pluck(key, mappings) lpluck(key, mappings) Returns an iterator or a list of values for ``key`` in each mapping in the given sequence. Essentially a shortcut for:: map(operator.itemgetter(key), mappings) .. function:: pluck_attr(attr, objects) lpluck_attr(attr, objects) Returns an iterator or a list of values for ``attr`` in each object in the given sequence. Essentially a shortcut for:: map(operator.attrgetter(attr), objects) Useful when dealing with collections of ORM objects:: users = User.query.all() ids = lpluck_attr('id', users) .. function:: invoke(objects, name, *args, **kwargs) linvoke(objects, name, *args, **kwargs) Calls named method with given arguments for each object in ``objects`` and returns an iterator or a list of results. Content tests ------------- .. function:: is_distinct(coll, key=identity) Checks if all elements in the collection are different:: assert is_distinct(field_names), "All fields should be named differently" Uses ``key`` to differentiate values. This way one can check if all first letters of ``words`` are different:: is_distinct(words, key=0) .. function:: all([pred], seq) Checks if ``pred`` holds for every element in a ``seq``. 
If ``pred`` is omitted checks if all elements of ``seq`` are truthy -- same as in built-in :func:`py3:all`:: they_are_ints = all(is_instance(n, int) for n in seq) they_are_even = all(even, seq) Note that, first example could be rewritten using :func:`isa` like this:: they_are_ints = all(isa(int), seq) .. function:: any([pred], seq) Returns ``True`` if ``pred`` holds for any item in given sequence. If ``pred`` is omitted checks if any element of ``seq`` is truthy. Check if there is a needle in haystack, using :ref:`extended predicate semantics `:: any(r'needle', haystack_strings) .. function:: none([pred], seq) Checks if none of items in given sequence pass ``pred`` or is truthy if ``pred`` is omitted. Just a stylish way to write ``not any(...)``:: assert none(' ' in name for name in names), "Spaces in names not allowed" # Or same using extended predicate semantics assert none(' ', names), "..." .. function:: one([pred], seq) Returns true if exactly one of items in ``seq`` passes ``pred``. Cheks for truthiness if ``pred`` is omitted. .. function:: some([pred], seq) Finds first item in ``seq`` passing ``pred`` or first that is true if ``pred`` is omitted. Low-level helpers ----------------- .. function:: empty(coll) Returns an empty collection of the same type as ``coll``. .. function:: iteritems(coll) Returns an iterator of items of a ``coll``. This means ``key, value`` pairs for any dictionaries:: list(iteritems({1, 2, 42})) # -> [1, 42, 2] list(iteritems({'a': 1})) # -> [('a', 1)] .. function:: itervalues(coll) Returns an iterator of values of a ``coll``. This means values for any dictionaries and just elements for other collections:: list(itervalues({1, 2, 42})) # -> [1, 42, 2] list(itervalues({'a': 1})) # -> [1] .. 
raw:: html :file: descriptions.html funcy-1.17/docs/conf.py000066400000000000000000000204321416001257100150130ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # funcy documentation build configuration file, created by # sphinx-quickstart on Tue Dec 18 21:32:23 2012. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os, re sys.path.insert(0, os.path.abspath('..')) on_rtd = os.environ.get('READTHEDOCS', None) == 'True' # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. needs_sphinx = '3.5.3' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.coverage', 'sphinx.ext.intersphinx', 'sphinx.ext.autodoc'] intersphinx_mapping = { 'py2': ('http://docs.python.org/2', None), 'py3': ('http://docs.python.org/3', None), } # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'funcy' copyright = u'2012-2021, Alexander Schepanovski' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # VERSION = open('../VERSION').read().strip() # The short X.Y version. version = re.match(r'^\d+\.\d+', VERSION).group(0) # The full version, including alpha/beta/rc tags. release = VERSION # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = False # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_style = '...' 
# Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 
#html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'funcydoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'funcy.tex', u'funcy documentation', u'Alexander Schepanovski', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'funcy', u'funcy documentation', [u'Alexander Schepanovski'], 1) ] # If true, show URL addresses after external links. 
#man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'funcy', u'funcy documentation', u'Alexander Schepanovski', 'funcy', 'A fancy and practical functional tools.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # rst_prolog = """ # .. module:: funcy # """ def setup(app): app.add_css_file('overrides.css') funcy-1.17/docs/debug.rst000066400000000000000000000077341416001257100153460ustar00rootroot00000000000000Debugging ========= .. function:: tap(value, label=None) Prints a value and then returns it. Useful to tap into some functional pipeline for debugging:: fields = (f for f in fields_for(category) if section in tap(tap(f).sections)) # ... do something with fields If ``label`` is specified then it's printed before corresponding value:: squares = {tap(x, 'x'): tap(x * x, 'x^2') for x in [3, 4]} # x: 3 # x^2: 9 # x: 4 # x^2: 16 # => {3: 9, 4: 16} .. decorator:: log_calls(print_func, errors=True, stack=True, repr_len=25) print_calls(errors=True, stack=True, repr_len=25) Will log or print all function calls, including arguments, results and raised exceptions. Can be used as a decorator or tapped into call expression:: sorted_fields = sorted(fields, key=print_calls(lambda f: f.order)) If ``errors`` is set to ``False`` then exceptions are not logged. This could be used to separate channels for normal and error logging:: @log_calls(log.info, errors=False) @log_errors(log.exception) def some_suspicious_function(...): # ... return result .. 
decorator:: log_enters(print_func, repr_len=25) print_enters(repr_len=25) log_exits(print_func, errors=True, stack=True, repr_len=25) print_exits(errors=True, stack=True, repr_len=25) Will log or print every time execution enters or exits the function. Should be used same way as :func:`@log_calls()` and :func:`@print_calls()` when you need to track only one event per function call. .. decorator:: log_errors(print_func, label=None, stack=True, repr_len=25) print_errors(label=None, stack=True, repr_len=25) Will log or print all function errors providing function arguments causing them. If ``stack`` is set to ``False`` then each error is reported with simple one line message. Can be combined with :func:`@silent` or :func:`@ignore()` to trace occasionally misbehaving function:: @ignore(...) @log_errors(logging.warning) def guess_user_id(username): initial = first_guess(username) # ... Can also be used as context decorator:: with print_errors('initialization', stack=False): load_this() load_that() # ... # SomeException: a bad thing raised in initialization .. decorator:: log_durations(print_func, label=None, unit='auto', threshold=None, repr_len=25) print_durations(label=None, unit='auto', threshold=None, repr_len=25) Will time each function call and log or print its duration:: @log_durations(logging.info) def do_hard_work(n): samples = range(n) # ... # 121 ms in do_hard_work(10) # 143 ms in do_hard_work(11) # ... A block of code could be timed with a help of context manager:: with print_durations('Creating models'): Model.objects.create(...) # ... # 10.2 ms in Creating models ``unit`` argument can be set to ``'ns'``, ``'mks'``, ``'ms'`` or ``'s'`` to use uniform time unit. If ``threshold`` is set then durations under this number of seconds are not logged. Handy to capture slow queries or API calls:: @log_durations(logging.warning, threshold=0.5) def make_query(sql, params): # ... .. 
function:: log_iter_durations(seq, print_func, label=None, unit='auto') print_iter_durations(seq, label=None, unit='auto') Wraps iterable ``seq`` into generator logging duration of processing of each item:: for item in print_iter_durations(seq, label='hard work'): do_smth(item) # 121 ms in iteration 0 of hard work # 143 ms in iteration 1 of hard work # ... ``unit`` can be set to ``'ns'``, ``'mks'``, ``'ms'`` or ``'s'``. .. raw:: html :file: descriptions.html funcy-1.17/docs/decorators.rst000066400000000000000000000054621416001257100164210ustar00rootroot00000000000000Decorators ========== .. module:: funcy .. decorator:: decorator Transforms a flat wrapper into a decorator with or without arguments. ``@decorator`` passes special ``call`` object as a first argument to a wrapper. A resulting decorator will preserve function module, name and docstring. It also adds ``__wrapped__`` attribute referring to wrapped function and ``__original__`` attribute referring to innermost wrapped one. Here is a simple logging decorator:: @decorator def log(call): print(call._func.__name__, call._args, call._kwargs) return call() ``call`` object also supports by name arg introspection and passing additional arguments to decorated function:: @decorator def with_phone(call): # call.request gets actual request value upon function call request = call.request # ... phone = Phone.objects.get(number=request.GET['phone']) # phone arg is added to *args passed to decorated function return call(phone) @with_phone def some_view(request, phone): # ... some code using phone return # ... A better practice would be adding keyword argument not positional. This makes such decorators more composable:: @decorator def with_phone(call): # ... return call(phone=phone) @decorator def with_user(call): # ... return call(user=user) @with_phone @with_user def some_view(request, phone=None, user=None): # ... return # ... 
If a function wrapped with ``@decorator`` has arguments other than ``call``, then decorator with arguments is created:: @decorator def joining(call, sep): return sep.join(call()) Generally a decorator with arguments is required to be called with ``()`` when applied to function. However, if you use only keyword only parameters aside from ``call`` then you can omit them:: @decorator def rate_limit(call, *, extra_labels=None): # ... @rate_limit # no extra labels, parentheses are optional def func(request, ...): # ... @rate_limit(extra_labels=lambda r: [f"user:{r.user.pk}"]) def func(request, ...): # ... You can see more examples in :mod:`flow` and :mod:`debug` submodules source code. .. decorator:: contextmanager A decorator helping to create context managers. Resulting functions also behave as decorators. This is a reexport or backport of :func:`py3:contextlib.contextmanager`. .. autodecorator:: wraps(wrapped, [assigned], [updated]) .. autofunction:: unwrap .. autoclass:: ContextDecorator .. raw:: html :file: descriptions.html funcy-1.17/docs/descriptions.html000066400000000000000000000717711416001257100171240ustar00rootroot00000000000000
count(start=0, step=1)

Makes infinite iterator of values:
start, start + step, start + 2*step, ...
cycle(seq)

Cycles passed sequence indefinitely
yielding its elements one by one.
repeat(item[, n])

Makes an iterator yielding item for n times
or indefinitely if n is omitted.
repeatedly(f[, n])

Takes a function of no args, presumably with side effects,
and returns an infinite (or length n) iterator of calls to it.
iterate(f, x)

Returns an infinite iterator of x, f(x), f(f(x)), ...
re_all(regex, s, flags=0)

Lists all matches of regex in s.
re_iter(regex, s, flags=0)

Iterates over matches of regex in s.
first(seq)

Returns the first item in the sequence.
Returns None if the sequence is empty.
second(seq)

Returns second item in the sequence.
Returns None if there are less than two items in it.
last(seq)

Returns the last item in the sequence.
Returns None if the sequence is empty.
nth(n, seq)

Returns nth item in the sequence
or None if no such item exists.
some([pred, ]seq)

Finds first item in seq passing pred
or first that is true if pred is omitted.
take(n, seq)

Returns a list of first n items in the sequence,
or all items if there are fewer than n.
drop(n, seq)

Skips first n items in the sequence,
yields the rest.
rest(seq)

Skips first item in the sequence, yields the rest.
butlast(seq)

Yields all elements of the sequence but last.
takewhile([pred, ]seq)

Yields seq items as long as they pass pred.
dropwhile([pred, ]seq)

Skips elements of seq while pred passes
and then yields the rest.
split_at(n, seq)
lsplit_at(n, seq)


Splits the sequence at given position,
returning a tuple of its start and tail.
split_by(pred, seq)
lsplit_by(pred, seq)


Splits the start of the sequence,
consisting of items passing pred,
from the rest of it.
map(f, *seqs)
lmap(f, *seqs)


Extended versions of map() and list(map())
mapcat(f, *seqs)
lmapcat(f, *seqs)


Maps given sequence(s) and concatenates the results.
keep([f, ]*seqs)
lkeep([f, ]*seqs)


Maps seq with f and filters out falsy results.
Simply removes falsy values in one argument version.
pluck(key, mappings)
lpluck(key, mappings)


Yields or lists values for key in each mapping.
pluck_attr(attr, objects)
lpluck_attr(attr, objects)


Yields or lists values of attr of each object.
invoke(objects, name, *args, **kwargs)
linvoke(objects, name, *args, **kwargs)


Yields or lists results of the given method call
for each object in objects.
@wrap_prop(ctx)

Wrap a property accessors with ctx.
filter(pred, seq)
lfilter(pred, seq)


Extended versions of filter() and list(filter()).
remove(pred, seq)
lremove(pred, seq)


Removes items from seq passing given predicate.
distinct(seq, key=identity)
ldistinct(seq, key=identity)


Removes items having same key from seq.
Preserves order.
where(mappings, **cond)
lwhere(mappings, **cond)


Selects mappings containing all pairs in cond.
without(seq, *items)
lwithout(seq, *items)


Returns sequence without items,
preserves order.
cat(seqs)
lcat(seqs)


Concatenates passed sequences.
concat(*seqs)
lconcat(*seqs)


Concatenates several sequences.
flatten(seq, follow=is_seqcont)
lflatten(seq, follow=is_seqcont)


Flattens arbitrary nested sequence,
dives into when follow(item) is truthy.
interleave(*seqs)

Yields first item of each sequence,
then second one and so on.
interpose(sep, seq)

Yields items of seq separated by sep.
lzip(*seqs)

List version of zip()
chunks(n, [step, ]seq)
lchunks(n, [step, ]seq)


Chunks seq into parts of length n or less.
Skips step items between chunks.
partition(n, [step, ]seq)
lpartition(n, [step, ]seq)


Partitions seq into parts of length n.
Skips step items between parts.
Non-fitting tail is ignored.
partition_by(f, seq)
lpartition_by(f, seq)


Partition seq into continuous chunks
with constant value of f.
split(pred, seq)
lsplit(pred, seq)


Splits seq items which pass pred
from the ones that don't.
count_by(f, seq)

Counts the number of occurrences of each value of f
across elements of seq.
count_reps(seq)

Counts repetitions of each value in seq.
group_by(f, seq)

Groups items of seq by f(item).
group_by_keys(get_keys, seq)

Groups elements of seq by multiple keys.
group_values(seq)

Groups values of (key, value) pairs by keys.
ilen(seq)

Consumes the given iterator and returns its length.
reductions(f, seq[, acc])
lreductions(f, seq[, acc])


Constructs intermediate reductions of seq by f.
sums(seq[, acc])
lsums(seq[, acc])


Returns a sequence of partial sums of seq.
all([pred, ]seq)

Checks if all items in seq pass pred.
any([pred, ]seq)

Checks if any item in seq passes pred.
none([pred, ]seq)

Checks if none of the items in seq pass pred.
one([pred, ]seq)

Checks if exactly one item in seq passes pred.
pairwise(seq)

Yields all pairs of neighboring items in seq.
with_prev(seq, fill=None)

Yields each item from seq with the one preceding it.
with_next(seq, fill=None)

Yields each item from seq with the next one.
zip_values(*dicts)

Yields tuples of corresponding values of given dicts.
zip_dicts(*dicts)

Yields tuples like (key, val1, val2, ...)
for each common key in all given dicts.
tree_leaves(root, follow=is_seqcont, children=iter)
ltree_leaves(root, follow=is_seqcont, children=iter)


Lists or iterates over tree leaves.
tree_nodes(root, follow=is_seqcont, children=iter)
ltree_nodes(root, follow=is_seqcont, children=iter)


Lists or iterates over tree nodes.
merge(*colls)

Merges several collections of same type into one:
dicts, sets, lists, tuples, iterators or strings.
For dicts later values take precedence.
merge_with(f, *dicts)

Merges several dicts combining values with given function.
join(colls)

Joins several collections of same type into one.
Same as merge() but accepts sequence of collections.
join_with(f, *dicts)

Joins several dicts combining values with given function.
walk(f, coll)

Maps coll with f, but preserves collection type.
walk_keys(f, coll)

Walks keys of coll, mapping them with f.
Works with dicts and collections of pairs.
walk_values(f, coll)

Walks values of coll, mapping them with f.
Works with dicts and collections of pairs.
select(pred, coll)

Filters elements of coll by pred
constructing a collection of same type.
select_keys(pred, coll)

Select part of coll with keys passing pred.
Works with dicts and collections of pairs.
select_values(pred, coll)

Select part of coll with values passing pred.
Works with dicts and collections of pairs.
compact(coll)

Removes falsy values from given collection.
All collections functions work with dicts.
These are targeted specifically at them.
flip(mapping)

Flip passed dict swapping its keys and values.
zipdict(keys, vals)

Creates a dict with keys mapped to the corresponding vals.
itervalues(coll)

Yields values of the given collection.
iteritems(coll)

Yields (key, value) pairs of the given collection.
project(mapping, keys)

Leaves only given keys in mapping.
omit(mapping, keys)

Removes given keys from mapping.
empty(coll)

Returns an empty collection of the same type as coll.
get_in(coll, path, default=None)

Returns a value at path in the given nested collection.
set_in(coll, path, value)

Creates a copy of coll with the value set at path.
update_in(coll, path, update, default=None)

Creates a copy of coll with a value updated at path.
del_in(coll, path)

Creates a copy of coll with path removed.
has_path(coll, path)

Tests whether path exists in a nested coll.
Most of functions in this section support extended semantics.
identity(x)

Returns its argument.
constantly(x)

Creates a function accepting any args, but always returning x.
func_partial(func, *args, **kwargs)

Like partial() but returns a real function.
partial(func, *args, **kwargs)

Returns partial application of func.
rpartial(func, *args)

Partially applies last arguments to func.
iffy([pred, ]action[, default=identity])

Creates a function, which conditionally applies action or default.
caller(*args, **kwargs)

Creates a function calling its argument with passed arguments.
re_finder(regex, flags=0)

Creates a function finding regex in passed string.
re_tester(regex, flags=0)

Creates a predicate testing passed strings with regex.
complement(pred)

Constructs a complementary predicate.
autocurry(func)

Creates a version of func returning its partial applications
until sufficient arguments are passed.
curry(func[, n])

Curries func into a chain of one argument functions.
Arguments are passed from left to right.
rcurry(func[, n])

Curries func from right to left.
compose(*fs)

Composes passed functions.
rcompose(*fs)

Composes fs, calling them from left to right.
juxt(*fs)
ljuxt(*fs)


Constructs a juxtaposition of the given functions.
Resulting function returns a list or an iterator of results of fs.
all_fn(*fs)

Constructs a predicate,
which holds when all fs hold.
any_fn(*fs)

Constructs a predicate,
which holds when any of fs holds.
none_fn(*fs)

Constructs a predicate,
which holds when none of fs hold.
one_fn(*fs)

Constructs a predicate,
which holds when exactly one of fs holds.
some_fn(*fs)

Constructs a function, which calls fs one by one
and returns first truthy result.
is_distinct(coll, key=identity)

Checks if all elements in the collection are different.
isa(*types)

Creates a function checking if its argument
is of any of given types.
is_iter(value)

Checks whether value is an iterator.
is_mapping(value)

Checks whether value is a mapping.
is_set(value)

Checks whether value is a set.
is_list(value)

Checks whether value is a list.
is_tuple(value)

Checks whether value is a tuple.
is_seq(value)

Checks whether value is a Sequence.
is_mapping(value)

Checks whether value is a mapping.
is_seqcoll(value)

Checks whether value is a list or a tuple,
which are both sequences and collections.
is_seqcont(value)

Checks whether value is a list, a tuple or an iterator,
which are both sequences and containers.
iterable(value)

Checks whether value is iterable.
@decorator

Transforms a flat wrapper into a decorator.
@wraps

A utility to pass function metadata
from wrapped function to a wrapper.
unwrap(func)

Get the object wrapped by func.
@once

Let function execute only once.
Noop all subsequent calls.
@once_per(*argnames)

Call function only once for every combination
of the given arguments.
@once_per_args

Call function only once for every combination
of values of its arguments.
@collecting

Transforms a generator into list returning function.
@joining(sep)

Joins decorated function results with sep.
@post_processing(func)

Post processes decorated function result with func.
@throttle(period)

Only runs a decorated function once per period.
@wrap_with(ctx)

Turn context manager into a decorator.
nullcontext(enter_result=None)

A noop context manager.
@retry(tries, errors=Exception, timeout=0, filter_errors=None)

Tries decorated function up to tries times.
Retries only on specified errors.
@silent

Alters function to ignore all exceptions.
@ignore(errors, default=None)

Alters function to ignore errors,
returning default instead.
suppress(*errors)

The context manager suppressing errors in its block.
@limit_error_rate(fails, timeout, ...)

If function fails to complete fails times in a row,
calls to it will be blocked for timeout seconds.
fallback(*approaches)

Tries several approaches until one works.
Each approach has a form of (callable, errors).
raiser(exception=Exception, *args, **kwargs)

Constructs function that raises the given exception
with given arguments on any invocation.
@reraise(errors, into)

Intercepts errors and reraises them as into exception.
tap(x, label=None)

Prints x and then returns it.
@log_calls(print_func, errors=True, stack=True)
@print_calls(errors=True, stack=True)


Logs or prints all function calls,
including arguments, results and raised exceptions.
@log_enters(print_func)
@print_enters


Logs or prints on each enter to a function.
@log_exits(print_func, errors=True, stack=True)
@print_exits(errors=True, stack=True)


Logs or prints on each exit from a function.
@log_errors(print_func, label=None, stack=True)
@print_errors(label=None, stack=True)


Logs or prints all errors within a function or block.
@log_durations(print_func, label=None)
@print_durations(label=None)


Times each function call or block execution.
log_iter_durations(seq, print_func, label=None)
print_iter_durations(seq, label=None)


Times processing of each item in seq.
@memoize

Memoizes a decorated function results.
@cache(timeout)

Caches a function results for timeout seconds.
@cached_property

Creates a property caching its result.
@cached_readonly

Creates a read-only property caching its result.
@make_lookuper

Creates a cached function with prefilled memory.
@silent_lookuper

Creates a cached function with prefilled memory.
Ignores memory misses, returning None.
re_find(regex, s, flags=0)

Matches regex against the given string,
returns the match in the simplest possible form.
re_test(regex, s, flags=0)

Tests whether regex matches against s.
cut_prefix(s, prefix)

Cuts prefix from given string if it's present.
cut_suffix(s, suffix)

Cuts suffix from given string if it's present.
str_join([sep="", ]seq)

Joins the given sequence with sep.
Forces stringification of seq items.
@monkey(cls_or_module, name=None)

Monkey-patches class or module.
class namespace

A base class that prevents functions turning into methods.
class LazyObject(init)

Creates an object setting itself up on first use.
isnone(x)

Checks if x is None.
notnone(x)

Checks if x is not None.
inc(x)

Increments its argument by 1.
dec(x)

Decrements its argument by 1.
even(x)

Checks if x is even.
odd(x)

Checks if x is odd.
funcy-1.17/docs/extended_fns.rst000066400000000000000000000045011416001257100167130ustar00rootroot00000000000000.. _extended_fns: Extended function semantics =================================== Many of funcy functions expecting predicate or mapping function as an argument can take something uncallable instead of it with semantics described in this table: ============ ================================= ================================= f passed Function Predicate ============ ================================= ================================= ``None`` :func:`identity ` bool string :func:`re_finder(f) ` :func:`re_tester(f) ` int or slice ``itemgetter(f)`` ``itemgetter(f)`` mapping ``lambda x: f[x]`` ``lambda x: f[x]`` set ``lambda x: x in f`` ``lambda x: x in f`` ============ ================================= ================================= Supporting functions -------------------- Here is a full list of functions supporting extended function semantics: ========================= ============================================================== Group Functions ========================= ============================================================== Sequence transformation :func:`map` :func:`keep` :func:`mapcat` Sequence filtering :func:`filter` :func:`remove` :func:`distinct` Sequence splitting :func:`dropwhile` :func:`takewhile` :func:`split` :func:`split_by` :func:`partition_by` Aggregration :func:`group_by` :func:`count_by` :func:`group_by_keys` Collection transformation :func:`walk` :func:`walk_keys` :func:`walk_values` Collection filtering :func:`select` :func:`select_keys` :func:`select_values` Content tests :func:`all` :func:`any` :func:`none` :func:`one` :func:`some` :func:`is_distinct` Function logic :func:`all_fn` :func:`any_fn` :func:`none_fn` :func:`one_fn` :func:`some_fn` Function tools :func:`iffy` :func:`compose` :func:`rcompose` :func:`complement` :func:`juxt` :func:`all_fn` :func:`any_fn` :func:`none_fn` :func:`one_fn` :func:`some_fn` 
========================= ============================================================== List or iterator versions of same functions not listed here for brevity but also support extended semantics. .. raw:: html :file: descriptions.html funcy-1.17/docs/flow.rst000066400000000000000000000177361416001257100152320ustar00rootroot00000000000000Flow ==== .. decorator:: silent Ignore all real exceptions (descendants of :exc:`~py3:exceptions.Exception`). Handy for cleaning data such as user input:: brand_id = silent(int)(request.GET['brand_id']) ids = keep(silent(int), request.GET.getlist('id')) And in data import/transform:: get_greeting = compose(silent(string.lower), re_finder(r'(\w+)!')) map(get_greeting, ['a!', ' B!', 'c.']) # -> ['a', 'b', None] .. note:: Avoid silencing non-primitive functions, use :func:`@ignore()` instead and even then be careful not to swallow exceptions unintentionally. .. decorator:: ignore(errors, default=None) Same as :func:`@silent`, but able to specify ``errors`` to catch and ``default`` to return in case of error caught. ``errors`` can either be exception class or a tuple of them. .. function:: suppress(*errors) A context manager which suppresses given exceptions under its scope:: with suppress(HttpError): # Assume this request can fail, and we are ok with it make_http_request() .. function:: nullcontext(enter_result=None) A noop context manager that returns ``enter_result`` from ``__enter__``:: ctx = nullcontext() if threads: ctx = op_thread_lock with ctx: # ... do stuff .. decorator:: once once_per_args once_per(*argnames) Call function only once, once for every combination of values of its arguments or once for every combination of given arguments. Thread safe. Handy for various initialization purposes:: # Global initialization @once def initialize_cache(): conn = some.Connection(...) # ... set up everything # Per argument initialization @once_per_args def initialize_language(lang): conf = load_language_conf(lang) # ... 
set up language # Setup each class once class SomeManager(Manager): @once_per('cls') def _initialize_class(self, cls): pre_save.connect(self._pre_save, sender=cls) # ... set up signals, no dups .. function:: raiser(exception_or_class=Exception, *args, **kwargs) Constructs function that raises given exception with given arguments on any invocation. You may pass a string instead of exception as a shortcut:: mocker.patch('mod.Class.propname', property(raiser("Shouldn't be called"))) This will raise an ``Exception`` with a corresponding message. .. decorator:: reraise(errors, into) Intercepts any error of ``errors`` classes and reraises it as ``into`` error. Can be used as decorator or a context manager:: with reraise(json.JSONDecodeError, SuspiciousOperation('Invalid JSON')): return json.loads(text) ``into`` can also be a callable to transform the error before reraising:: @reraise(requests.RequestsError, lambda e: MyAPIError(error_desc(e))) def api_call(...): # ... .. decorator:: retry(tries, errors=Exception, timeout=0, filter_errors=None) Every call of the decorated function is tried up to ``tries`` times. The first attempt counts as a try. Retries occur when any subclass of ``errors`` is raised, where``errors`` is an exception class or a list/tuple of exception classes. There will be a delay in ``timeout`` seconds between tries. A common use is to wrap some unreliable action:: @retry(3, errors=HttpError) def download_image(url): # ... make http request return image Errors to retry may addtionally be filtered with ``filter_errors`` when classes are not specific enough:: @retry(3, errors=HttpError, filter_errors=lambda e: e.status_code >= 500) def download_image(url): # ... You can pass a callable as ``timeout`` to achieve exponential delays or other complex behavior:: @retry(3, errors=HttpError, timeout=lambda a: 2 ** a) def download_image(url): # ... make http request return image .. function:: fallback(*approaches) Tries several approaches until one works. 
Each approach is either callable or a tuple ``(callable, errors)``, where errors is an exception class or a tuple of classes, which signal to fall back to next approach. If ``errors`` is not supplied then fall back is done for any :exc:`~py3:exceptions.Exception`:: fallback( (partial(send_mail, ADMIN_EMAIL, message), SMTPException), partial(log.error, message), # Handle any Exception (raiser(FeedbackError, "Failed"), ()) # Handle nothing ) .. function:: limit_error_rate(fails, timeout, exception=ErrorRateExceeded) If function fails to complete ``fails`` times in a row, calls to it will be intercepted for ``timeout`` with ``exception`` raised instead. A clean way to short-circuit function taking too long to fail:: @limit_error_rate(fails=5, timeout=60, exception=RequestError('Temporary unavailable')) def do_request(query): # ... make a http request return data Can be combined with :func:`ignore` to silently stop trying for a while:: @ignore(ErrorRateExceeded, default={'id': None, 'name': 'Unknown'}) @limit_error_rate(fails=5, timeout=60) def get_user(id): # ... make a http request return data .. function:: throttle(period) Only runs a decorated function once in a ``period``:: @throttle(60) def process_beat(pk, progress): Model.objects.filter(pk=pk).update(beat=timezone.now(), progress=progress) # Processing something, update progress info no more often then once a minute for i in ...: process_beat(pk, i / n) # ... do actual processing .. decorator:: collecting Transforms generator or other iterator returning function into list returning one. Handy to prevent quirky iterator-returning properties:: @property @collecting def path_up(self): node = self while node: yield node node = node.parent Also makes list constructing functions beautifully yielding. .. Or you could just write:: .. @property .. def path_up(self): .. going_up = iterate(attrgetter('parent'), self) .. return list(takewhile(bool, going_up)) .. 
decorator:: joining(sep) Wraps common python idiom "collect then join" into a decorator. Transforms generator or alike into function, returning string of joined results. Automatically converts all elements to separator type for convenience. Goes well with generators with some ad-hoc logic within:: @joining(', ') def car_desc(self): yield self.year_made if self.engine_volume: yield '%s cc' % self.engine_volume if self.transmission: yield self.get_transmission_display() if self.gear: yield self.get_gear_display() # ... Use ``bytes`` separator to get bytes result:: @joining(b' ') def car_desc(self): yield self.year_made # ... See also :func:`str_join`. .. decorator:: post_processing(func) Passes decorated function result through ``func``. This is the generalization of :func:`@collecting` and :func:`@joining()`. Could save you writing a decorator or serve as an extended comprehension: :: @post_processing(dict) def make_cond(request): if request.GET['new']: yield 'year__gt', 2000 for key, value in request.GET.items(): if value == '': continue # ... .. decorator:: wrap_with(ctx) Turns a context manager into a decorator:: @wrap_with(threading.Lock()) def protected_func(...): # ... .. raw:: html :file: descriptions.html funcy-1.17/docs/funcs.rst000066400000000000000000000122671416001257100153730ustar00rootroot00000000000000Functions ========= .. function:: identity(x) Returns its argument. .. function:: constantly(x) Returns function accepting any args, but always returning ``x``. .. function:: caller(*args, **kwargs) Returns function calling its argument with passed arguments. .. function:: partial(func, *args, **kwargs) Returns partial application of ``func``. A re-export of :func:`py3:functools.partial`. Can be used in a variety of ways. DSLs is one of them:: field = dict json_field = partial(field, json=True) .. 
function:: rpartial(func, *args) Partially applies last arguments in ``func``:: from operator import div one_third = rpartial(div, 3.0) Arguments are passed to ``func`` in the same order as they came to :func:`rpartial`:: separate_a_word = rpartial(str.split, ' ', 1) .. function:: func_partial(func, *args, **kwargs) Like :func:`partial` but returns a real function. Which is useful when, for example, you want to create a method of it:: setattr(self, 'get_%s_display' % field.name, func_partial(_get_FIELD_display, field)) Use :func:`partial` if you are ok to get callable object instead of function as it's faster. .. function:: curry(func[, n]) Curries function. For example, given function of two arguments ``f(a, b)`` returns function:: lambda a: lambda b: f(a, b) Handy to make a partial factory:: make_tester = curry(re_test) is_word = make_tester(r'^\w+$') is_int = make_tester(r'^[1-9]\d*$') But see :func:`re_tester` if you need this particular one. .. function:: rcurry(func[, n]) Curries function from last argument to first:: has_suffix = rcurry(str.endswith, 2) lfilter(has_suffix("ce"), ["nice", "cold", "ice"]) # -> ["nice", "ice"] Can fix number of arguments when it's ambiguous:: to_power = rcurry(pow, 2) # curry 2 first args in reverse order to_square = to_power(2) to_cube = to_power(3) .. function:: autocurry(func) Constructs a version of ``func`` returning its partial applications until sufficient arguments are passed:: def remainder(what, by): return what % by rem = autocurry(remainder) assert rem(10, 3) == rem(10)(3) == rem()(10, 3) == 1 assert map(rem(by=3), range(5)) == [0, 1, 2, 0, 1] Can clean your code a bit when :func:`partial` makes it too cluttered. .. function:: compose(*fs) Returns composition of functions:: extract_int = compose(int, r'\d+') Supports :ref:`extended_fns`. .. function:: rcompose(*fs) Returns composition of functions, with functions called from left to right. 
Designed to facilitate transducer-like pipelines:: # Note the use of iterator function variants everywhere process = rcompose( partial(remove, is_useless), partial(map, process_row), partial(chunks, 100) ) for chunk in process(data): write_chunk_to_db(chunk) Supports :ref:`extended_fns`. .. function:: juxt(*fs) ljuxt(*fs) Takes several functions and returns a new function that is the juxtaposition of those. The resulting function takes a variable number of arguments, and returns an iterator or a list containing the result of applying each function to the arguments. .. function:: iffy([pred], action, [default=identity]) Returns function, which conditionally, depending on ``pred``, applies ``action`` or ``default``. If ``default`` is not callable then it is returned as is from resulting function. E.g. this will call all callable values leaving rest of them as is:: map(iffy(callable, caller()), values) Common use it to deal with messy data:: dirty_data = ['hello', None, 'bye'] lmap(iffy(len), dirty_data) # => [5, None, 3] lmap(iffy(isa(str), len, 0), dirty_data) # => [5, 0, 3], also safer See also :func:`silent` for easier use cases. Function logic -------------- This family of functions supports creating predicates from other predicates and regular expressions. .. function:: complement(pred) Constructs a negation of ``pred``, i.e. a function returning a boolean opposite of original function:: is_private = re_tester(r'^_') is_public = complement(is_private) # or just is_public = complement(r'^_') .. function:: all_fn(*fs) any_fn(*fs) none_fn(*fs) one_fn(*fs) Construct a predicate returning ``True`` when all, any, none or exactly one of ``fs`` return ``True``. Support short-circuit behavior. :: is_even_int = all_fn(isa(int), even) .. function:: some_fn(*fs) Constructs function calling ``fs`` one by one and returning first true result. 
Enables creating functions by short-circuiting several behaviours:: get_amount = some_fn( lambda s: 4 if 'set of' in s else None, r'(\d+) wheels?', compose({'one': 1, 'two': 2, 'pair': 2}, r'(\w+) wheels?') ) If you wonder how on Earth one can :func:`compose` dict and string see :ref:`extended_fns`. .. raw:: html :file: descriptions.html funcy-1.17/docs/index.rst000066400000000000000000000016531416001257100153610ustar00rootroot00000000000000Welcome to funcy documentation! ================================= Funcy is designed to be a layer of functional tools over python. Special topics: .. toctree:: :maxdepth: 1 overview cheatsheet extended_fns python3 Contents: .. toctree:: :maxdepth: 1 seqs colls funcs decorators flow strings calc types objects debug primitives Essays: - `Why Every Language Needs Its Underscore `_ - `Functional Python Made Easy `_ - `Abstracting Control Flow `_ - `Painless Decorators `_ You can also `look at the code `_ or `create an issue `_. funcy-1.17/docs/objects.rst000066400000000000000000000065461416001257100157110ustar00rootroot00000000000000Objects ======= .. decorator:: cached_property Creates a property caching its result. This is a great way to lazily attach some data to an object:: class MyUser(AbstractBaseUser): @cached_property def public_phones(self): return list(self.phones.filter(confirmed=True, public=True)) One can rewrite cached value simply by assigning and clear cache by deleting it:: user.public_phones = [...] del user.public_phones # will be populated on next access Note that the last line will raise ``AttributeError`` if cache is not set, to clear cache safely one might use:: user.__dict__.pop('public_phones') **CAVEAT:** only one cached value is stored for each property, so if you call ancestors cached property from outside of corresponding child property it will save ancestors value, which will prevent future evaluations from ever calling child function. .. 
decorator:: cached_readonly Creates a read-only property caching its result. Same as :func:`cached_property` but protected against rewrites. .. decorator:: wrap_prop(ctx) Wraps a property accessors with a context manager:: class SomeConnector: # We want several threads share this session, # but only one of them initialize it. @wrap_prop(threading.Lock()) @cached_property def session(self): # ... build a session Note that ``@wrap_prop()`` preserves descriptor type, i.e. wrapped cached property may still be rewritten and cleared the same way. .. decorator:: monkey(cls_or_module, name=None) Monkey-patches class or module by adding decorated function or property to it named ``name`` or the same as decorated function. Saves overwritten method to ``original`` attribute of decorated function for a kind of inheritance:: # A simple caching of all get requests, # even for models for which you can't easily change Manager @monkey(QuerySet) def get(self, *args, **kwargs): if not args and list(kwargs) == ['pk']: cache_key = '%s:%d' % (self.model, kwargs['pk']) result = cache.get(cache_key) if result is None: result = get.original(self, *args, **kwargs) cache.set(cache_key, result) return result else: return get.original(self, *args, **kwargs) .. class:: namespace A base class that prevents its member functions turning into methods:: class Checks(namespace): is_str = isa(str) max_len = lambda l: lambda value: len(value) <= l field_checks = all_fn(Checks.is_str, Checks.max_len(30)) This is noop in Python 3 as it doesn't have unbound methods anyway. .. class:: LazyObject(init) Creates a object only really setting itself up on first attribute access. Since attribute access happens immediately before any method call, this permits delaying initialization until first call:: @LazyObject def redis_client(): if isinstance(settings.REDIS, str): return StrictRedis.from_url(settings.REDIS) else: return StrictRedis(**settings.REDIS) # Will be only created on first use redis_client.set(...) .. 
raw:: html :file: descriptions.html funcy-1.17/docs/overview.rst000066400000000000000000000003121416001257100161070ustar00rootroot00000000000000.. _overview: Overview ======== Start with: :: pip install funcy .. include:: ../README.rst :start-after: -------------- :end-before: And `much more `_. funcy-1.17/docs/primitives.rst000066400000000000000000000020141416001257100164350ustar00rootroot00000000000000Primitives ========== .. function:: isnone(x) Checks if ``x`` is ``None``. Handy with filtering functions:: _, data = lsplit_by(isnone, dirty_data) # Skip leading nones Plays nice with :func:`silent`, which returns ``None`` on fail:: remove(isnone, map(silent(int), strings_with_numbers)) Note that it's usually simpler to use :func:`keep` or :func:`compact` if you don't need to distinguish between ``None`` and other falsy values. .. function:: notnone(x) Checks if ``x`` is not ``None``. A shortcut for ``complement(isnone)`` meant to be used when ``bool`` is not specific enough. Compare:: select_values(notnone, data_dict) # removes None values compact(data_dict) # removes all falsy values .. function:: inc(x) Increments its argument by 1. .. function:: dec(x) Decrements its argument by 1. .. function:: even(x) Checks if ``x`` is even. .. function:: odd(x) Checks if ``x`` is odd. .. raw:: html :file: descriptions.html funcy-1.17/docs/python3.rst000066400000000000000000000072221416001257100156540ustar00rootroot00000000000000Python 2/3 support ================== Funcy works with both python 2 and 3. However, it has slightly different interface. It follows python 3 convention of "iterator by default" for utilities like :func:`map`, :func:`filter` and such. When funcy has two versions of utility (list and iterator) they are named like :func:`keep` and :func:`ikeep` in python 2 and :func:`lkeep` and :func:`keep` in python 3. You can look up a full table of differently named functions below. 
Writing cross-python code ------------------------- You can do that two ways: writing python 2 code that works in python 3 or vice versa. You can import python 2 or 3 style functions from ``funcy.py2`` or ``funcy.py3``:: from funcy.py2 import whatever, you, need # write python 2 style code here :: from funcy.py3 import whatever, you, need # write python 3 style code here You can even import :func:`map`, :func:`filter`, :func:`py3:zip` and their list and iterator versions. Full table of python dependent function names --------------------------------------------- ====================== ======================= ===================== ========================== Python 2 / list Python 2 / iterator Python 3 / list Python 3 / iterator ====================== ======================= ===================== ========================== :func:`map` :func:`imap` :func:`lmap` :func:`map` :func:`filter` :func:`ifilter` :func:`lfilter` :func:`filter` :func:`py2:zip` :func:`~itertools.izip` :func:`lzip` :func:`py3:zip` :func:`remove` :func:`iremove` :func:`lremove` :func:`remove` :func:`keep` :func:`ikeep` :func:`lkeep` :func:`keep` :func:`without` :func:`iwithout` :func:`lwithout` :func:`without` :func:`concat` :func:`iconcat` :func:`lconcat` :func:`concat` :func:`cat` :func:`icat` :func:`lcat` :func:`cat` :func:`flatten` :func:`iflatten` :func:`lflatten` :func:`flatten` :func:`mapcat` :func:`imapcat` :func:`lmapcat` :func:`mapcat` :func:`distinct` :func:`idistinct` :func:`ldistinct` :func:`distinct` :func:`split` :func:`isplit` :func:`lsplit` :func:`split` :func:`split_at` :func:`isplit_at` :func:`lsplit_at` :func:`split_at` :func:`split_by` :func:`isplit_by` :func:`lsplit_by` :func:`split_by` :func:`partition` :func:`ipartition` :func:`lpartition` :func:`partition` :func:`chunks` :func:`ichunks` :func:`lchunks` :func:`chunks` :func:`partition_by` :func:`ipartition_by` :func:`lpartition_by` :func:`partition_by` :func:`reductions` :func:`ireductions` :func:`lreductions` 
:func:`reductions` :func:`sums` :func:`isums` :func:`lsums` :func:`sums` :func:`juxt` :func:`ijuxt` :func:`ljuxt` :func:`juxt` :func:`where` :func:`iwhere` :func:`lwhere` :func:`where` :func:`pluck` :func:`ipluck` :func:`lpluck` :func:`pluck` :func:`pluck_attr` :func:`ipluck_attr` :func:`lpluck_attr` :func:`pluck_attr` :func:`invoke` :func:`iinvoke` :func:`linvoke` :func:`invoke` *-* :func:`izip_values` *-* :func:`zip_values` *-* :func:`izip_dicts` *-* :func:`zip_dicts` ====================== ======================= ===================== ========================== .. raw:: html :file: descriptions.html funcy-1.17/docs/requirements.txt000066400000000000000000000000461416001257100167770ustar00rootroot00000000000000Sphinx==4.3.0 sphinx-rtd-theme==0.5.1 funcy-1.17/docs/seqs.rst000066400000000000000000000451621416001257100152300ustar00rootroot00000000000000Sequences ========= This functions are aimed at manipulating finite and infinite sequences of values. Some functions have two flavors: one returning list and other returning possibly infinite iterator, the latter ones follow convention of prepending ``i`` before list-returning function name. When working with sequences, see also :mod:`py3:itertools` standard module. Funcy reexports and aliases some functions from it. Generate -------- .. function:: repeat(item, [n]) Makes an iterator yielding ``item`` for ``n`` times or indefinitely if ``n`` is omitted. ``repeat`` simply repeats given value, when you need to reevaluate something repeatedly use :func:`repeatedly` instead. When you just need a length ``n`` list or tuple of ``item`` you can use:: [item] * n # or (item,) * n .. Is a reexport of :func:`itertools.repeat`. .. function:: count(start=0, step=1) Makes infinite iterator of values: ``start, start + step, start + 2*step, ...``. Could be used to generate sequence:: map(lambda x: x ** 2, count(1)) # -> 1, 4, 9, 16, ... 
Or annotate sequence using :func:`py3:zip`:: zip(count(), 'abcd') # -> (0, 'a'), (1, 'b'), (2, 'c'), (3, 'd') # print code with BASIC-style numbered lines for line in zip(count(10, 10), code.splitlines()): print '%d %s' % line See also :func:`py3:enumerate` and original :func:`py3:itertools.count` documentation. .. function:: cycle(seq) Cycles passed ``seq`` indefinitely returning its elements one by one. Useful when you need to cyclically decorate some sequence:: for n, parity in zip(count(), cycle(['even', 'odd'])): print '%d is %s' % (n, parity) .. Is a reexport of :func:`itertools.cycle`. .. function:: repeatedly(f, [n]) Takes a function of no args, presumably with side effects, and returns an infinite (or length ``n`` if supplied) iterator of calls to it. For example, this call can be used to generate 10 random numbers:: repeatedly(random.random, 10) Or one can create a length ``n`` list of freshly-created objects of same type:: repeatedly(list, n) .. function:: iterate(f, x) Returns an infinite iterator of ``x, f(x), f(f(x)), ...`` etc. Most common use is to generate some recursive sequence:: iterate(inc, 5) # -> 5, 6, 7, 8, 9, ... iterate(lambda x: x * 2, 1) # -> 1, 2, 4, 8, 16, ... step = lambda p: (p[1], p[0] + p[1]) map(first, iterate(step, (0, 1))) # -> 0, 1, 1, 2, 3, 5, 8, ... (Fibonacci sequence) Manipulate ---------- This section provides some robust tools for sequence slicing. Consider :ref:`py3:slicings` or :func:`py3:itertools.islice` for more generic cases. .. function:: take(n, seq) Returns a list of the first ``n`` items in the sequence, or all items if there are fewer than ``n``. :: take(3, [2, 3, 4, 5]) # [2, 3, 4] take(3, count(5)) # [5, 6, 7] take(3, 'ab') # ['a', 'b'] .. function:: drop(n, seq) Skips first ``n`` items in the sequence, returning iterator yielding rest of its items. :: drop(3, [2, 3, 4, 5]) # iter([5]) drop(3, count(5)) # count(8) drop(3, 'ab') # empty iterator .. function:: first(seq) Returns the first item in the sequence. 
Returns ``None`` if the sequence is empty. Typical usage is choosing first of some generated variants:: # Get a text message of first failed validation rule fail = first(rule.text for rule in rules if not rule.test(instance)) # Use simple pattern matching to construct form field widget TYPE_TO_WIDGET = ( [lambda f: f.choices, lambda f: Select(choices=f.choices)], [lambda f: f.type == 'int', lambda f: TextInput(coerce=int)], [lambda f: f.type == 'string', lambda f: TextInput()], [lambda f: f.type == 'text', lambda f: Textarea()], [lambda f: f.type == 'boolean', lambda f: Checkbox(f.label)], ) return first(do(field) for cond, do in TYPE_TO_WIDGET if cond(field)) Other common use case is passing to :func:`map` or :func:`lmap`. See last example in :func:`iterate` for such example. .. function:: second(seq) Returns the second item in given sequence. Returns ``None`` if there are less than two items in it. Could come in handy with sequences of pairs, e.g. :meth:`py3:dict.items`. Following code extract values of a dict sorted by keys:: map(second, sorted(some_dict.items())) And this line constructs an ordered by value dict from a plain one:: OrderedDict(sorted(plain_dict.items(), key=second)) .. function:: nth(n, seq) Returns nth item in sequence or ``None`` if no one exists. Items are counted from 0, so it's like indexed access but works for iterators. E.g. here is how one can get 6th line of `some_file`:: nth(5, repeatedly(open('some_file').readline)) .. function:: last(seq) Returns the last item in the sequence. Returns ``None`` if the sequence is empty. Tries to be efficient when sequence supports indexed or reversed access and fallbacks to iterating over it if not. .. function:: rest(seq) Skips first item in the sequence, returning iterator starting just after it. A shortcut for :func:`drop(1, seq) `. .. function:: butlast(seq) Returns an iterator of all elements of the sequence but last. .. function:: ilen(seq) Calculates length of iterator. 
Will consume it or hang up if it's infinite. Especially useful in conjunction with filtering or slicing functions, for example, this way one can find common start length of two strings:: ilen(takewhile(lambda (x, y): x == y, zip(s1, s2))) Unite ----- .. function:: concat(*seqs) lconcat(*seqs) Concats several sequences into single iterator or list. :func:`concat` is an alias for :func:`py3:itertools.chain`. .. function:: cat(seqs) lcat(seqs) Concatenates passed sequences. Useful when dealing with sequence of sequences, see :func:`concat` or :func:`lconcat` to join just a few sequences. Flattening of various nested sequences is most common use:: # Flatten two level deep list lcat(list_of_lists) # Get a flat html of errors of a form errors = cat(inline.errors() for inline in form) error_text = '
'.join(errors) # Brace expansion on product of sums # (a + b)(t + pq)x == atx + apqx + btx + bpqx terms = [['a', 'b'], ['t', 'pq'], ['x']] lmap(lcat, product(*terms)) # [list('atx'), list('apqx'), list('btx'), list('bpqx')] :func:`cat` is an alias for :meth:`py3:itertools.chain.from_iterable`. .. function:: flatten(seq, follow=is_seqcont) lflatten(seq, follow=is_seqcont) Flattens arbitrary nested sequence of values and other sequences. ``follow`` argument determines whether to unpack each item. By default it dives into lists, tuples and iterators, see :func:`is_seqcont` for further explanation. See also :func:`cat` or :func:`lcat` if you need to flatten strictly two-level sequence of sequences. .. function:: tree_leaves(root, follow=is_seqcont, children=iter) ltree_leaves(root, follow=is_seqcont, children=iter) A way to iterate or list over all the tree leaves. E.g. this is how you can list all descendants of a class:: ltree_leaves(Base, children=type.__subclasses__, follow=type.__subclasses__) .. function:: tree_nodes(root, follow=is_seqcont, children=iter) ltree_nodes(root, follow=is_seqcont, children=iter) A way to iterate or list over all the tree nodes. E.g. this is how you can iterate over all classes in hierarchy:: tree_nodes(Base, children=type.__subclasses__, follow=type.__subclasses__) .. function:: interleave(*seqs) Returns an iterator yielding first item in each sequence, then second and so on until some sequence ends. Numbers of items taken from all sequences are always equal. .. function:: interpose(sep, seq) Returns an iterator yielding elements of ``seq`` separated by ``sep``. This is like :meth:`py3:str.join` for lists. This code is a part of a translator working with operation node:: def visit_BoolOp(self, node): # ... do generic visit node.code = lmapcat(translate, interpose(node.op, node.values)) .. function:: lzip(*seqs) Joins given sequences into a list of tuples of corresponding first, second and later values. 
Essentially a list version of :func:`py3:zip` for Python 3. Transform and filter -------------------- Most of functions in this section support :ref:`extended_fns`. Among other things it allows to rewrite examples using :func:`re_tester` and :func:`re_finder` tighter. .. function:: map(f, seq) lmap(f, seq) Extended versions of :func:`py3:map` and its list version. .. function:: filter(pred, seq) lfilter(pred, seq) Extended versions of :func:`py3:filter` and its list version. .. function:: remove(pred, seq) lremove(pred, seq) Returns an iterator or a list of items of ``seq`` that result in false when passed to ``pred``. The results of this functions complement results of :func:`filter` and :func:`lfilter`. A handy use is passing :func:`re_tester` result as ``pred``. For example, this code removes any whitespace-only lines from list:: remove(re_tester('^\s+$'), lines) Note, you can rewrite it shorter using :ref:`extended_fns`:: remove('^\s+$', lines) .. function:: keep([f], seq) lkeep([f], seq) Maps ``seq`` with given function and then filters out falsy elements. Simply removes falsy items when ``f`` is absent. In fact these functions are just handy shortcuts:: keep(f, seq) == filter(bool, map(f, seq)) keep(seq) == filter(bool, seq) lkeep(f, seq) == lfilter(bool, map(f, seq)) lkeep(seq) == lfilter(bool, seq) Natural use case for :func:`keep` is data extraction or recognition that could eventually fail:: # Extract numbers from words lkeep(re_finder(r'\d+'), words) # Recognize as many colors by name as possible lkeep(COLOR_BY_NAME.get, color_names) An iterator version can be useful when you don't need or not sure you need the whole sequence. For example, you can use :func:`first` - :func:`keep` combo to find out first match:: first(keep(COLOR_BY_NAME.get, color_name_candidates)) Alternatively, you can do the same with :func:`some` and :func:`map`. One argument variant is a simple tool to keep your data free of falsy junk. 
This one returns non-empty description lines:: keep(description.splitlines()) Other common case is using generator expression instead of mapping function. Consider these two lines:: keep(f.name for f in fields) # sugar generator expression keep(attrgetter('name'), fields) # pure functions .. function:: mapcat(f, *seqs) lmapcat(f, *seqs) Maps given sequence(s) and then concatenates results, essentially a shortcut for ``cat(map(f, *seqs))``. Come in handy when extracting multiple values from every sequence item or transforming nested sequences:: # Get all the lines of all the texts in single flat list mapcat(str.splitlines, bunch_of_texts) # Extract all numbers from strings mapcat(partial(re_all, r'\d+'), bunch_of_strings) .. function:: without(seq, *items) lwithout(seq, *items) Returns sequence with ``items`` removed, preserves order. Designed to work with a few ``items``, this allows removing unhashable objects:: non_empty_lists = without(lists, []) In case of large amount of unwanted elements one can use :func:`remove`:: remove(set(unwanted_elements), seq) Or simple set difference if order of sequence is irrelevant. Split and chunk --------------- .. function:: split(pred, seq) lsplit(pred, seq) Splits sequence items which pass predicate from the ones that don't, essentially returning a tuple ``filter(pred, seq), remove(pred, seq)``. For example, this way one can separate private attributes of an instance from public ones:: private, public = lsplit(re_tester('^_'), dir(instance)) Split absolute and relative urls using extended predicate semantics:: absolute, relative = lsplit(r'^http://', urls) .. function:: split_at(n, seq) lsplit_at(n, seq) Splits sequence at given position, returning a tuple of its start and tail. .. function:: split_by(pred, seq) lsplit_by(pred, seq) Splits start of sequence, consisting of items passing predicate, from the rest of it. 
Works similar to ``takewhile(pred, seq), dropwhile(pred, seq)``, but works with iterator ``seq`` correctly:: lsplit_by(bool, iter([-2, -1, 0, 1, 2])) # [-2, -1], [0, 1, 2] .. function:: takewhile([pred], seq) Yeilds elements of ``seq`` as long as they pass ``pred``. Stops on first one which makes predicate falsy:: # Extract first paragraph of text takewhile(re_tester(r'\S'), text.splitlines()) # Build path from node to tree root takewhile(bool, iterate(attrgetter('parent'), node)) .. function:: dropwhile([pred], seq) This is a mirror of :func:`takewhile`. Skips elements of given sequence while ``pred`` is true and yields the rest of it:: # Skip leading whitespace-only lines dropwhile(re_tester('^\s*$'), text_lines) .. function:: group_by(f, seq) Groups elements of ``seq`` keyed by the result of ``f``. The value at each key will be a list of the corresponding elements, in the order they appear in ``seq``. Returns :class:`defaultdict(list) `. :: stats = group_by(len, ['a', 'ab', 'b']) stats[1] # -> ['a', 'b'] stats[2] # -> ['ab'] stats[3] # -> [], since stats is defaultdict One can use :func:`split` when grouping by boolean predicate. See also :func:`py3:itertools.groupby`. .. function:: group_by_keys(get_keys, seq) Groups elements of ``seq`` having multiple keys each into :class:`defaultdict(list) `. Can be used to reverse grouping:: posts_by_tag = group_by_keys(attrgetter('tags'), posts) sentences_with_word = group_by_keys(str.split, sentences) .. function:: group_values(seq) Groups values of ``(key, value)`` pairs. May think of it like ``dict()`` but collecting collisions: :: group_values(keep(r'^--(\w+)=(.+)', sys.argv)) .. function:: partition(n, [step], seq) lpartition(n, [step], seq) Iterates or lists over partitions of ``n`` items, at offsets ``step`` apart. If ``step`` is not supplied, defaults to ``n``, i.e. the partitions do not overlap. Returns only full length-``n`` partitions, in case there are not enough elements for last partition they are ignored. 
Most common use is deflattening data:: # Make a dict from flat list of pairs dict(partition(2, flat_list_of_pairs)) # Structure user credentials {id: (name, password) for id, name, password in partition(3, users)} A three argument variant of :func:`partition` can be used to process sequence items in context of their neighbors:: # Smooth data by averaging out with a sliding window [sum(window) / n for window in partition(n, 1, data_points)] Also look at :func:`pairwise` for similar use. Other use of :func:`partition` is processing sequence of data elements or jobs in chunks, but take a look at :func:`chunks` for that. .. function:: chunks(n, [step], seq) lchunks(n, [step], seq) Like :func:`partition`, but may include partitions with fewer than ``n`` items at the end:: chunks(2, 'abcde') # -> 'ab', 'cd', 'e' chunks(2, 4, 'abcde') # -> 'ab', 'e' Handy for batch processing. .. function:: partition_by(f, seq) lpartition_by(f, seq) Partition ``seq`` into list of lists or iterator of iterators splitting at ``f(item)`` change. Data handling ------------- .. function:: distinct(seq, key=identity) ldistinct(seq, key=identity) Returns unique items of the sequence with order preserved. If ``key`` is supplied then distinguishes values by comparing their keys. .. note:: Elements of a sequence or their keys should be hashable. .. function:: with_prev(seq, fill=None) Returns an iterator of a pair of each item with one preceding it. Yields `fill` or `None` as preceding element for first item. Great for getting rid of clunky ``prev`` housekeeping in for loops. This way one can indent first line of each paragraph while printing text:: for line, prev in with_prev(text.splitlines()): if not prev: print ' ', print line Use :func:`pairwise` to iterate only on full pairs. .. function:: with_next(seq, fill=None) Returns an iterator of a pair of each item with one next to it. Yields `fill` or `None` as next element for last item. See also :func:`with_prev` and :func:`pairwise`. .. 
function:: pairwise(seq) Yields pairs of items in ``seq`` like ``(item0, item1), (item1, item2), ...``. A great way to process sequence items in a context of each neighbor:: # Check if seq is non-descending all(left <= right for left, right in pairwise(seq)) .. function:: count_by(f, seq) Counts numbers of occurrences of values of ``f`` on elements of ``seq``. Returns :class:`defaultdict(int) ` of counts. Calculating a histogram is one common use:: # Get a length histogram of given words count_by(len, words) .. function:: count_reps(seq) Counts number of repetitions of each value in ``seq``. Returns :class:`defaultdict(int) ` of counts. This is faster and shorter alternative to ``count_by(identity, ...)`` .. function:: reductions(f, seq, [acc]) lreductions(f, seq, [acc]) Returns a sequence of the intermediate values of the reduction of ``seq`` by ``f``. In other words it yields a sequence like:: reduce(f, seq[:1], [acc]), reduce(f, seq[:2], [acc]), ... You can use :func:`sums` or :func:`lsums` for a common use of getting list of partial sums. .. function:: sums(seq, [acc]) lsums(seq, [acc]) Same as :func:`reductions` or :func:`lreductions` with reduce function fixed to addition. Find out which straw will break camels back:: first(i for i, total in enumerate(sums(straw_weights)) if total > camel_toughness) .. raw:: html :file: descriptions.html funcy-1.17/docs/strings.rst000066400000000000000000000050361416001257100157420ustar00rootroot00000000000000String utils ============ .. Prevent text wrap in captures table .. raw:: html .. 
function:: re_find(regex, s, flags=0) Finds ``regex`` in ``s``, returning the match in the simplest possible form guessed by captures in given regular expression: ================================= ================================== Captures Return value ================================= ================================== no captures a matched string single positional capture a substring matched by capture only positional captures a tuple of substrings for captures only named captures a dict of substrings for captures mixed pos/named captures a match object ================================= ================================== Returns ``None`` on mismatch. :: # Find first number in a line silent(int)(re_find(r'\d+', line)) # Find number of men in a line re_find(r'(\d+) m[ae]n', line) # Parse uri into nice dict re_find(r'^/post/(?P\d+)/(?P\w+)$', uri) .. function:: re_test(regex, s, flags=0) Tests whether ``regex`` can be found in ``s``. .. function:: re_all(regex, s, flags=0) re_iter(regex, s, flags=0) Returns a list or an iterator of all matches of ``regex`` in ``s``. Matches are presented in most simple form possible, see table in :func:`re_find` docs. :: # A fast and dirty way to parse ini section into dict dict(re_iter('(\w+)=(\w+)', ini_text)) .. function:: re_finder(regex, flags=0) Returns a function that calls :func:`re_find` for its sole argument. Its main purpose is quickly constructing mapper functions for :func:`map` and friends. See also :ref:`extended_fns`. .. function:: re_tester(regex, flags=0) Returns a function that calls :func:`re_test` for it's sole argument. Aimed at quick construction of predicates for use in :func:`filter` and friends. See also :ref:`extended_fns`. .. function:: str_join([sep=""], seq) Joins sequence with ``sep``. Same as ``sep.join(seq)``, but forcefully converts all elements to separator type, ``str`` by default. See also :func:`joining`. .. function:: cut_prefix(s, prefix) Cuts prefix from given string if it's present. .. 
function:: cut_suffix(s, suffix) Cuts suffix from given string if it's present. .. raw:: html :file: descriptions.html funcy-1.17/docs/types.rst000066400000000000000000000022401416001257100154070ustar00rootroot00000000000000Type testing ============ .. function:: isa(*types) Returns function checking if its argument is of any of given ``types``. Split labels from ids:: labels, ids = lsplit(isa(str), values) .. function:: is_mapping(value) is_set(value) is_list(value) is_tuple(value) is_seq(value) is_iter(value) These functions check if value is ``Mapping``, ``Set``, ``list``, ``tuple``, ``Sequence`` or iterator respectively. .. function:: is_seqcoll(value) Checks if ``value`` is a list or a tuple, which are both sequences and collections. .. function:: is_seqcont(value) Checks if ``value`` is a list, a tuple or an iterator, which are sequential containers. It can be used to distinguish between value and multiple values in dual-interface functions:: def add_to_selection(view, region): if is_seqcont(region): # A sequence of regions view.sel().add_all(region) else: view.sel().add(region) .. function:: iterable(value) Tests if ``value`` is iterable. .. 
raw:: html :file: descriptions.html funcy-1.17/funcy/000077500000000000000000000000001416001257100137075ustar00rootroot00000000000000funcy-1.17/funcy/__init__.py000066400000000000000000000002501416001257100160150ustar00rootroot00000000000000from .compat import PY2 if PY2: from .py2 import * # noqa from .py2 import __all__ else: from .py3 import * # noqa from .py3 import __all__ # noqa funcy-1.17/funcy/_inspect.py000066400000000000000000000140311416001257100160640ustar00rootroot00000000000000from __future__ import absolute_import from inspect import CO_VARARGS, CO_VARKEYWORDS try: from inspect import signature except ImportError: signature = None # from collections import namedtuple import types import re from .compat import PY2 from .decorators import unwrap # This provides sufficient introspection for *curry() functions. # # We only really need a number of required positional arguments. # If arguments can be specified by name (not true for many builtin functions), # then we need to now their names to ignore anything else passed by name. # # Stars mean some positional argument which can't be passed by name. # Functions not mentioned here get one star "spec". 
ARGS = {} builtins_name = '__builtin__' if PY2 else 'builtins' ARGS[builtins_name] = { 'bool': 'x', 'complex': 'real,imag', 'enumerate': 'sequence,start' if PY2 else 'iterable,start', 'file': 'file-**', 'float': 'x', 'int': 'x-*', 'long': 'x-*', 'open': 'name-**' if PY2 else 'file-**', 'round': 'number-*', 'setattr': '***', 'str': '*-*' if PY2 else 'object-*', 'unicode': 'string-**', '__import__': 'name-****', '__buildclass__': '***', # Complex functions with different set of arguments 'iter': '*-*', 'format': '*-*', 'type': '*-**', } # Add two argument functions two_arg_funcs = '''cmp coerce delattr divmod filter getattr hasattr isinstance issubclass map pow reduce''' ARGS[builtins_name].update(dict.fromkeys(two_arg_funcs.split(), '**')) ARGS['functools'] = {'reduce': '**'} ARGS['itertools'] = { 'accumulate': 'iterable-*', 'combinations': 'iterable,r', 'combinations_with_replacement': 'iterable,r', 'compress': 'data,selectors', 'groupby': 'iterable-*', 'permutations': 'iterable-*', 'repeat': 'object-*', } two_arg_funcs = 'dropwhile filterfalse ifilter ifilterfalse starmap takewhile' ARGS['itertools'].update(dict.fromkeys(two_arg_funcs.split(), '**')) ARGS['operator'] = { 'delslice': '***', 'getslice': '***', 'setitem': '***', 'setslice': '****', } two_arg_funcs = """ _compare_digest add and_ concat contains countOf delitem div eq floordiv ge getitem gt iadd iand iconcat idiv ifloordiv ilshift imatmul imod imul indexOf ior ipow irepeat irshift is_ is_not isub itruediv ixor le lshift lt matmul mod mul ne or_ pow repeat rshift sequenceIncludes sub truediv xor """ ARGS['operator'].update(dict.fromkeys(two_arg_funcs.split(), '**')) ARGS['operator'].update([ ('__%s__' % op.strip('_'), args) for op, args in ARGS['operator'].items()]) ARGS['_operator'] = ARGS['operator'] # Fixate this STD_MODULES = set(ARGS) # Describe some funcy functions, mostly for r?curry() ARGS['funcy.seqs'] = { 'map': 'f*', 'lmap': 'f*', 'xmap': 'f*', 'mapcat': 'f*', 'lmapcat': 'f*', } 
ARGS['funcy.colls'] = { 'merge_with': 'f*', } type_classes = (type, types.ClassType) if hasattr(types, 'ClassType') else type Spec = namedtuple("Spec", "max_n names req_n req_names kw") def get_spec(func, _cache={}): func = getattr(func, '__original__', None) or unwrap(func) try: return _cache[func] except (KeyError, TypeError): pass mod = getattr(func, '__module__', None) if mod in STD_MODULES or mod in ARGS and func.__name__ in ARGS[mod]: _spec = ARGS[mod].get(func.__name__, '*') required, _, optional = _spec.partition('-') req_names = re.findall(r'\w+|\*', required) # a list with dups of * max_n = len(req_names) + len(optional) req_n = len(req_names) spec = Spec(max_n=max_n, names=set(), req_n=req_n, req_names=set(req_names), kw=False) _cache[func] = spec return spec elif isinstance(func, type_classes): # Old style classes without base if not hasattr(func, '__init__'): return Spec(max_n=0, names=set(), req_n=0, req_names=set(), kw=False) # __init__ inherited from builtin classes objclass = getattr(func.__init__, '__objclass__', None) if objclass and objclass is not func: return get_spec(objclass) # Introspect constructor and remove self spec = get_spec(func.__init__) self_set = set([func.__init__.__code__.co_varnames[0]]) return spec._replace(max_n=spec.max_n - 1, names=spec.names - self_set, req_n=spec.req_n - 1, req_names=spec.req_names - self_set) else: try: defaults_n = len(func.__defaults__) except (AttributeError, TypeError): defaults_n = 0 try: varnames = func.__code__.co_varnames n = func.__code__.co_argcount names = set(varnames[:n]) req_n = n - defaults_n req_names = set(varnames[:req_n]) kw = bool(func.__code__.co_flags & CO_VARKEYWORDS) # If there are varargs they could be required, but all keywords args can't be max_n = req_n + 1 if func.__code__.co_flags & CO_VARARGS else n return Spec(max_n=max_n, names=names, req_n=req_n, req_names=req_names, kw=kw) except AttributeError: # We use signature last to be fully backwards compatible. 
Also it's slower try: sig = signature(func) except (ValueError, TypeError): raise ValueError('Unable to introspect %s() arguments' % (getattr(func, '__qualname__', None) or getattr(func, '__name__', func))) else: spec = _cache[func] = _sig_to_spec(sig) return spec def _sig_to_spec(sig): max_n, names, req_n, req_names, kw = 0, set(), 0, set(), False for name, param in sig.parameters.items(): max_n += 1 if param.kind == param.VAR_KEYWORD: kw = True elif param.kind == param.VAR_POSITIONAL: req_n += 1 else: names.add(name) if param.default is param.empty: req_n += 1 req_names.add(name) return Spec(max_n=max_n, names=names, req_n=req_n, req_names=req_names, kw=kw) funcy-1.17/funcy/calc.py000066400000000000000000000117431416001257100151710ustar00rootroot00000000000000from datetime import timedelta import time import inspect from collections import deque from bisect import bisect from .decorators import wraps from .compat import PY2 __all__ = ['memoize', 'make_lookuper', 'silent_lookuper', 'cache'] class SkipMemory(Exception): pass SkipMemoization = SkipMemory # Old name # TODO: use real kwonly once in Python 3 only def memoize(*args, **kwargs): """@memoize(key_func=None). Makes decorated function memoize its results. If key_func is specified uses key_func(*func_args, **func_kwargs) as memory key. Otherwise uses args + tuple(sorted(kwargs.items())) Exposes its memory via .memory attribute. 
""" if args: assert len(args) == 1 assert not kwargs return memoize()(args[0]) key_func = kwargs.pop('key_func', None) if kwargs: raise TypeError('memoize() got unexpected keyword arguments: %s', ', '.join(kwargs)) return _memory_decorator({}, key_func) memoize.skip = SkipMemory def cache(timeout, key_func=None): """Caches a function results for timeout seconds.""" if isinstance(timeout, timedelta): timeout = timeout.total_seconds() return _memory_decorator(CacheMemory(timeout), key_func) cache.skip = SkipMemory def _memory_decorator(memory, key_func): def decorator(func): @wraps(func) def wrapper(*args, **kwargs): # We inline this here since @memoize also targets microoptimizations key = key_func(*args, **kwargs) if key_func else \ args + tuple(sorted(kwargs.items())) if kwargs else args try: return memory[key] except KeyError: try: value = memory[key] = func(*args, **kwargs) return value except SkipMemoization as e: return e.args[0] if e.args else None def invalidate(*args, **kwargs): key = key_func(*args, **kwargs) if key_func else \ args + tuple(sorted(kwargs.items())) if kwargs else args memory.pop(key, None) wrapper.invalidate = invalidate def invalidate_all(): memory.clear() wrapper.invalidate_all = invalidate_all wrapper.memory = memory return wrapper return decorator class CacheMemory(dict): def __init__(self, timeout): self.timeout = timeout self.clear() def __setitem__(self, key, value): expires_at = time.time() + self.timeout dict.__setitem__(self, key, (value, expires_at)) self._keys.append(key) self._expires.append(expires_at) def __getitem__(self, key): value, expires_at = dict.__getitem__(self, key) if expires_at <= time.time(): self.expire() raise KeyError(key) return value def expire(self): i = bisect(self._expires, time.time()) for _ in range(i): self._expires.popleft() self.pop(self._keys.popleft(), None) def clear(self): dict.clear(self) self._keys = deque() self._expires = deque() def _make_lookuper(silent): def make_lookuper(func): """ 
Creates a single argument function looking up result in a memory. Decorated function is called once on first lookup and should return all available arg-value pairs. Resulting function will raise LookupError when using @make_lookuper or simply return None when using @silent_lookuper. """ has_args, has_keys = has_arg_types(func) assert not has_keys, \ 'Lookup table building function should not have keyword arguments' if has_args: @memoize def wrapper(*args): f = lambda: func(*args) f.__name__ = '%s(%s)' % (func.__name__, ', '.join(map(str, args))) return make_lookuper(f) else: memory = {} def wrapper(arg): if not memory: memory[object()] = None # prevent continuos memory refilling memory.update(func()) if silent: return memory.get(arg) elif arg in memory: return memory[arg] else: raise LookupError("Failed to look up %s(%s)" % (func.__name__, arg)) return wraps(func)(wrapper) return make_lookuper make_lookuper = _make_lookuper(False) silent_lookuper = _make_lookuper(True) silent_lookuper.__name__ = 'silent_lookuper' if PY2: def has_arg_types(func): spec = inspect.getargspec(func) return bool(spec.args or spec.varargs), bool(spec.keywords) else: def has_arg_types(func): params = inspect.signature(func).parameters.values() return any(p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD, p.VAR_POSITIONAL) for p in params), \ any(p.kind in (p.KEYWORD_ONLY, p.VAR_KEYWORD) for p in params) funcy-1.17/funcy/colls.py000066400000000000000000000261221416001257100154000ustar00rootroot00000000000000try: from __builtin__ import all as _all, any as _any except ImportError: from builtins import all as _all, any as _any from copy import copy from operator import itemgetter, methodcaller, attrgetter from itertools import chain, tee from collections import defaultdict from .compat import basestring, range, zip, map, filter, PY2, Mapping, Set, Iterable, Iterator from .primitives import EMPTY from .funcs import partial, compose from .funcmakers import make_func, make_pred from .seqs 
import take, xmap, filter as xfilter __all__ = ['empty', 'iteritems', 'itervalues', 'join', 'merge', 'join_with', 'merge_with', 'walk', 'walk_keys', 'walk_values', 'select', 'select_keys', 'select_values', 'compact', 'is_distinct', 'all', 'any', 'none', 'one', 'some', 'zipdict', 'flip', 'project', 'omit', 'zip_values', 'zip_dicts', 'where', 'pluck', 'pluck_attr', 'invoke', 'lwhere', 'lpluck', 'lpluck_attr', 'linvoke', 'get_in', 'set_in', 'update_in', 'del_in', 'has_path'] ### Generic ops FACTORY_REPLACE = { type(object.__dict__): dict, type({}.keys()): list, type({}.values()): list, type({}.items()): list, } def _factory(coll, mapper=None): coll_type = type(coll) # Hack for defaultdicts overridden constructor if isinstance(coll, defaultdict): item_factory = compose(mapper, coll.default_factory) if mapper and coll.default_factory \ else coll.default_factory return partial(defaultdict, item_factory) elif isinstance(coll, Iterator): return iter elif isinstance(coll, basestring): return coll_type().join elif coll_type in FACTORY_REPLACE: return FACTORY_REPLACE[coll_type] else: return coll_type def empty(coll): """Creates an empty collection of the same type.""" if isinstance(coll, Iterator): return iter([]) return _factory(coll)() if PY2: def iteritems(coll): return coll.iteritems() if hasattr(coll, 'iteritems') else coll def itervalues(coll): return coll.itervalues() if hasattr(coll, 'itervalues') else coll else: def iteritems(coll): return coll.items() if hasattr(coll, 'items') else coll def itervalues(coll): return coll.values() if hasattr(coll, 'values') else coll iteritems.__doc__ = "Yields (key, value) pairs of the given collection." itervalues.__doc__ = "Yields values of the given collection." 
def join(colls): """Joins several collections of same type into one.""" colls, colls_copy = tee(colls) it = iter(colls_copy) try: dest = next(it) except StopIteration: return None cls = dest.__class__ if isinstance(dest, basestring): return ''.join(colls) elif isinstance(dest, Mapping): result = dest.copy() for d in it: result.update(d) return result elif isinstance(dest, Set): return dest.union(*it) elif isinstance(dest, (Iterator, range)): return chain.from_iterable(colls) elif isinstance(dest, Iterable): # NOTE: this could be reduce(concat, ...), # more effective for low count return cls(chain.from_iterable(colls)) else: raise TypeError("Don't know how to join %s" % cls.__name__) def merge(*colls): """Merges several collections of same type into one. Works with dicts, sets, lists, tuples, iterators and strings. For dicts later values take precedence.""" return join(colls) def join_with(f, dicts): """Joins several dicts, combining values with given function.""" dicts = list(dicts) if not dicts: return {} elif len(dicts) == 1: return dicts[0] lists = {} for c in dicts: for k, v in iteritems(c): if k in lists: lists[k].append(v) else: lists[k] = [v] if f is not list: # kind of walk_values() inplace for k, v in iteritems(lists): lists[k] = f(v) return lists def merge_with(f, *dicts): """Merges several dicts, combining values with given function.""" return join_with(f, dicts) def walk(f, coll): """Walks the collection transforming its elements with f. 
Same as map, but preserves coll type.""" return _factory(coll)(xmap(f, iteritems(coll))) def walk_keys(f, coll): """Walks keys of the collection, mapping them with f.""" f = make_func(f) # NOTE: we use this awkward construct instead of lambda to be Python 3 compatible def pair_f(pair): k, v = pair return f(k), v return walk(pair_f, coll) def walk_values(f, coll): """Walks values of the collection, mapping them with f.""" f = make_func(f) # NOTE: we use this awkward construct instead of lambda to be Python 3 compatible def pair_f(pair): k, v = pair return k, f(v) return _factory(coll, mapper=f)(xmap(pair_f, iteritems(coll))) # TODO: prewalk, postwalk and friends def select(pred, coll): """Same as filter but preserves coll type.""" return _factory(coll)(xfilter(pred, iteritems(coll))) def select_keys(pred, coll): """Select part of the collection with keys passing pred.""" pred = make_pred(pred) return select(lambda pair: pred(pair[0]), coll) def select_values(pred, coll): """Select part of the collection with values passing pred.""" pred = make_pred(pred) return select(lambda pair: pred(pair[1]), coll) def compact(coll): """Removes falsy values from the collection.""" if isinstance(coll, Mapping): return select_values(bool, coll) else: return select(bool, coll) ### Content tests def is_distinct(coll, key=EMPTY): """Checks if all elements in the collection are different.""" if key is EMPTY: return len(coll) == len(set(coll)) else: return len(coll) == len(set(xmap(key, coll))) def all(pred, seq=EMPTY): """Checks if all items in seq pass pred (or are truthy).""" if seq is EMPTY: return _all(pred) return _all(xmap(pred, seq)) def any(pred, seq=EMPTY): """Checks if any item in seq passes pred (or is truthy).""" if seq is EMPTY: return _any(pred) return _any(xmap(pred, seq)) def none(pred, seq=EMPTY): """"Checks if none of the items in seq pass pred (or are truthy).""" return not any(pred, seq) def one(pred, seq=EMPTY): """Checks whether exactly one item in seq passes pred 
(or is truthy).""" if seq is EMPTY: return one(bool, pred) return len(take(2, xfilter(pred, seq))) == 1 # Not same as in clojure! returns value found not pred(value) def some(pred, seq=EMPTY): """Finds first item in seq passing pred or first that is truthy.""" if seq is EMPTY: return some(bool, pred) return next(xfilter(pred, seq), None) # TODO: a variant of some that returns mapped value, # one can use some(map(f, seq)) or first(keep(f, seq)) for now. # TODO: vector comparison tests - ascending, descending and such # def chain_test(compare, seq): # return all(compare, zip(seq, rest(seq)) def zipdict(keys, vals): """Creates a dict with keys mapped to the corresponding vals.""" return dict(zip(keys, vals)) def flip(mapping): """Flip passed dict or collection of pairs swapping its keys and values.""" def flip_pair(pair): k, v = pair return v, k return walk(flip_pair, mapping) def project(mapping, keys): """Leaves only given keys in mapping.""" return _factory(mapping)((k, mapping[k]) for k in keys if k in mapping) def omit(mapping, keys): """Removes given keys from mapping.""" return _factory(mapping)((k, v) for k, v in iteritems(mapping) if k not in keys) def zip_values(*dicts): """Yields tuples of corresponding values of several dicts.""" if len(dicts) < 1: raise TypeError('zip_values expects at least one argument') keys = set.intersection(*map(set, dicts)) for key in keys: yield tuple(d[key] for d in dicts) def zip_dicts(*dicts): """Yields tuples like (key, (val1, val2, ...)) for each common key in all given dicts.""" if len(dicts) < 1: raise TypeError('zip_dicts expects at least one argument') keys = set.intersection(*map(set, dicts)) for key in keys: yield key, tuple(d[key] for d in dicts) def get_in(coll, path, default=None): """Returns a value at path in the given nested collection.""" for key in path: try: coll = coll[key] except (KeyError, IndexError): return default return coll def set_in(coll, path, value): """Creates a copy of coll with the value set at 
path.""" return update_in(coll, path, lambda _: value) def update_in(coll, path, update, default=None): """Creates a copy of coll with a value updated at path.""" if not path: return update(coll) elif isinstance(coll, list): copy = coll[:] # NOTE: there is no auto-vivication for lists copy[path[0]] = update_in(copy[path[0]], path[1:], update, default) return copy else: copy = coll.copy() current_default = {} if len(path) > 1 else default copy[path[0]] = update_in(copy.get(path[0], current_default), path[1:], update, default) return copy def del_in(coll, path): """Creates a copy of coll with a nested key or index deleted.""" if not path: return coll try: next_coll = coll[path[0]] except (KeyError, IndexError): return coll coll_copy = copy(coll) if len(path) == 1: del coll_copy[path[0]] else: coll_copy[path[0]] = del_in(next_coll, path[1:]) return coll_copy def has_path(coll, path): """Checks if path exists in the given nested collection.""" for p in path: try: coll = coll[p] except (KeyError, IndexError): return False return True def lwhere(mappings, **cond): """Selects mappings containing all pairs in cond.""" return list(where(mappings, **cond)) def lpluck(key, mappings): """Lists values for key in each mapping.""" return list(pluck(key, mappings)) def lpluck_attr(attr, objects): """Lists values of given attribute of each object.""" return list(pluck_attr(attr, objects)) def linvoke(objects, name, *args, **kwargs): """Makes a list of results of the obj.name(*args, **kwargs) for each object in objects.""" return list(invoke(objects, name, *args, **kwargs)) # Iterator versions for python 3 interface def where(mappings, **cond): """Iterates over mappings containing all pairs in cond.""" items = cond.items() match = lambda m: all(k in m and m[k] == v for k, v in items) return filter(match, mappings) def pluck(key, mappings): """Iterates over values for key in mappings.""" return map(itemgetter(key), mappings) def pluck_attr(attr, objects): """Iterates over values of 
given attribute of given objects.""" return map(attrgetter(attr), objects) def invoke(objects, name, *args, **kwargs): """Yields results of the obj.name(*args, **kwargs) for each object in objects.""" return map(methodcaller(name, *args, **kwargs), objects) funcy-1.17/funcy/compat.py000066400000000000000000000021121416001257100155400ustar00rootroot00000000000000try: # Python 3 from itertools import filterfalse filter, map, zip, range = filter, map, zip, range # noqa basestring = (bytes, str) def lmap(f, *seqs): return list(map(f, *seqs)) def lfilter(f, seq): return list(filter(f, seq)) except ImportError: # Python 2 lmap, lfilter, range = map, filter, xrange # noqa from itertools import (ifilter as filter, imap as map, izip as zip, # noqa ifilterfalse as filterfalse) basestring = basestring # noqa # collections.abc was added in Python 3.3 try: from collections.abc import Mapping, Set, Sequence, Iterable, Iterator, Hashable # noqa except ImportError: from collections import Mapping, Set, Sequence, Iterable, Iterator, Hashable # noqa import sys PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 # Taken from six and simplified if PY3: exec("""def raise_from(value, from_value): try: raise value from from_value finally: value = None """) else: def raise_from(value, from_value): raise value funcy-1.17/funcy/debug.py000066400000000000000000000171101416001257100153470ustar00rootroot00000000000000# -*- coding: utf-8 -*- from __future__ import print_function import re import time import traceback from itertools import chain from functools import partial from .compat import basestring from .decorators import decorator, wraps, Call __all__ = [ 'tap', 'log_calls', 'print_calls', 'log_enters', 'print_enters', 'log_exits', 'print_exits', 'log_errors', 'print_errors', 'log_durations', 'print_durations', 'log_iter_durations', 'print_iter_durations', ] REPR_LEN = 25 def tap(x, label=None): """Prints x and then returns it.""" if label: print('%s: %s' % (label, x)) else: 
print(x) return x @decorator def log_calls(call, print_func, errors=True, stack=True, repr_len=REPR_LEN): """Logs or prints all function calls, including arguments, results and raised exceptions.""" signature = signature_repr(call, repr_len) try: print_func('Call %s' % signature) result = call() # NOTE: using full repr of result print_func('-> %s from %s' % (smart_repr(result, max_len=None), signature)) return result except BaseException as e: if errors: print_func('-> ' + _format_error(signature, e, stack)) raise def print_calls(errors=True, stack=True, repr_len=REPR_LEN): if callable(errors): return log_calls(print)(errors) else: return log_calls(print, errors, stack, repr_len) print_calls.__doc__ = log_calls.__doc__ @decorator def log_enters(call, print_func, repr_len=REPR_LEN): """Logs each entrance to a function.""" print_func('Call %s' % signature_repr(call, repr_len)) return call() def print_enters(repr_len=REPR_LEN): """Prints on each entrance to a function.""" if callable(repr_len): return log_enters(print)(repr_len) else: return log_enters(print, repr_len) @decorator def log_exits(call, print_func, errors=True, stack=True, repr_len=REPR_LEN): """Logs exits from a function.""" signature = signature_repr(call, repr_len) try: result = call() # NOTE: using full repr of result print_func('-> %s from %s' % (smart_repr(result, max_len=None), signature)) return result except BaseException as e: if errors: print_func('-> ' + _format_error(signature, e, stack)) raise def print_exits(errors=True, stack=True, repr_len=REPR_LEN): """Prints on exits from a function.""" if callable(errors): return log_exits(print)(errors) else: return log_exits(print, errors, stack, repr_len) class LabeledContextDecorator(object): """ A context manager which also works as decorator, passing call signature as its label. 
""" def __init__(self, print_func, label=None, repr_len=REPR_LEN): self.print_func = print_func self.label = label self.repr_len = repr_len def __call__(self, label=None, **kwargs): if callable(label): return self.decorator(label) else: return self.__class__(self.print_func, label, **kwargs) def decorator(self, func): @wraps(func) def inner(*args, **kwargs): # Recreate self with a new label so that nested and recursive calls will work cm = self.__class__.__new__(self.__class__) cm.__dict__.update(self.__dict__) cm.label = signature_repr(Call(func, args, kwargs), self.repr_len) with cm: return func(*args, **kwargs) return inner class log_errors(LabeledContextDecorator): """Logs or prints all errors within a function or block.""" def __init__(self, print_func, label=None, stack=True, repr_len=REPR_LEN): LabeledContextDecorator.__init__(self, print_func, label=label, repr_len=repr_len) self.stack = stack def __enter__(self): return self def __exit__(self, exc_type, exc_value, tb): if exc_type: if self.stack: exc_message = ''.join(traceback.format_exception(exc_type, exc_value, tb)) else: exc_message = '%s: %s' % (exc_type.__name__, exc_value) self.print_func(_format_error(self.label, exc_message, self.stack)) print_errors = log_errors(print) # Duration utils def format_time(sec): if sec < 1e-6: return '%8.2f ns' % (sec * 1e9) elif sec < 1e-3: return '%8.2f mks' % (sec * 1e6) elif sec < 1: return '%8.2f ms' % (sec * 1e3) else: return '%8.2f s' % sec time_formatters = { 'auto': format_time, 'ns': lambda sec: '%8.2f ns' % (sec * 1e9), 'mks': lambda sec: '%8.2f mks' % (sec * 1e6), 'ms': lambda sec: '%8.2f ms' % (sec * 1e3), 's': lambda sec: '%8.2f s' % sec, } class log_durations(LabeledContextDecorator): """Times each function call or block execution.""" def __init__(self, print_func, label=None, unit='auto', threshold=-1, repr_len=REPR_LEN): LabeledContextDecorator.__init__(self, print_func, label=label, repr_len=repr_len) if unit not in time_formatters: raise 
ValueError('Unknown time unit: %s. It should be ns, mks, ms, s or auto.' % unit) self.format_time = time_formatters[unit] self.threshold = threshold def __enter__(self): self.start = time.time() return self def __exit__(self, *exc): duration = time.time() - self.start if duration >= self.threshold: duration_str = self.format_time(duration) self.print_func("%s in %s" % (duration_str, self.label) if self.label else duration_str) print_durations = log_durations(print) def log_iter_durations(seq, print_func, label=None, unit='auto'): """Times processing of each item in seq.""" if unit not in time_formatters: raise ValueError('Unknown time unit: %s. It should be ns, mks, ms, s or auto.' % unit) _format_time = time_formatters[unit] suffix = " of %s" % label if label else "" it = iter(seq) for i, item in enumerate(it): start = time.time() yield item duration = _format_time(time.time() - start) print_func("%s in iteration %d%s" % (duration, i, suffix)) def print_iter_durations(seq, label=None, unit='auto'): """Times processing of each item in seq.""" return log_iter_durations(seq, print, label, unit=unit) ### Formatting utils def _format_error(label, e, stack=True): if isinstance(e, Exception): if stack: e_message = traceback.format_exc() else: e_message = '%s: %s' % (e.__class__.__name__, e) else: e_message = e if label: template = '%s raised in %s' if stack else '%s raised in %s' return template % (e_message, label) else: return e_message ### Call signature stringification utils def signature_repr(call, repr_len=REPR_LEN): if isinstance(call._func, partial): if hasattr(call._func.func, '__name__'): name = '<%s partial>' % call._func.func.__name__ else: name = '' else: name = getattr(call._func, '__name__', '') args_repr = (smart_repr(arg, repr_len) for arg in call._args) kwargs_repr = ('%s=%s' % (key, smart_repr(value, repr_len)) for key, value in call._kwargs.items()) return '%s(%s)' % (name, ', '.join(chain(args_repr, kwargs_repr))) def smart_repr(value, 
max_len=REPR_LEN): if isinstance(value, basestring): res = repr(value) else: res = str(value) res = re.sub(r'\s+', ' ', res) if max_len and len(res) > max_len: res = res[:max_len-3] + '...' return res funcy-1.17/funcy/decorators.py000066400000000000000000000275261416001257100164420ustar00rootroot00000000000000import sys import inspect from functools import partial from .compat import PY2 __all__ = ['decorator', 'wraps', 'unwrap', 'ContextDecorator', 'contextmanager'] def decorator(deco): """ Transforms a flat wrapper into decorator:: @decorator def func(call, methods, content_type=DEFAULT): # These are decorator params # Access call arg by name if call.request.method not in methods: # ... # Decorated functions and all the arguments are accesible as: print(call._func, call_args, call._kwargs) # Finally make a call: return call() """ if has_single_arg(deco): return make_decorator(deco) elif has_1pos_and_kwonly(deco): # Any arguments after first become decorator arguments # And a decorator with arguments is essentialy a decorator fab def decorator_fab(_func=None, **dkwargs): # TODO: make _func pos only in Python 3 if _func is not None: return make_decorator(deco, (), dkwargs)(_func) return make_decorator(deco, (), dkwargs) else: def decorator_fab(*dargs, **dkwargs): return make_decorator(deco, dargs, dkwargs) return wraps(deco)(decorator_fab) def make_decorator(deco, dargs=(), dkwargs={}): @wraps(deco) def _decorator(func): def wrapper(*args, **kwargs): call = Call(func, args, kwargs) return deco(call, *dargs, **dkwargs) return wraps(func)(wrapper) # NOTE: should I update name to show args? # Save these for introspection _decorator._func, _decorator._args, _decorator._kwargs = deco, dargs, dkwargs return _decorator class Call(object): """ A call object to pass as first argument to decorator. Call object is just a proxy for decorated function with call arguments saved in its attributes. 
""" def __init__(self, func, args, kwargs): self._func, self._args, self._kwargs = func, args, kwargs def __call__(self, *a, **kw): if not a and not kw: return self._func(*self._args, **self._kwargs) else: return self._func(*(self._args + a), **dict(self._kwargs, **kw)) def __getattr__(self, name): try: res = self.__dict__[name] = arggetter(self._func)(name, self._args, self._kwargs) return res except TypeError as e: raise AttributeError(*e.args) def __str__(self): func = getattr(self._func, '__qualname__', str(self._func)) args = ", ".join(list(map(str, self._args)) + ["%s=%s" % t for t in self._kwargs.items()]) return "%s(%s)" % (func, args) def __repr__(self): return "" % self if PY2: def has_single_arg(func): spec = inspect.getargspec(func) return len(spec.args) == 1 and not spec.varargs and not spec.keywords def has_1pos_and_kwonly(func): spec = inspect.getargspec(func) return len(spec.args) == 1 and not spec.varargs else: from collections import Counter from inspect import Parameter as P def has_single_arg(func): sig = inspect.signature(func) if len(sig.parameters) != 1: return False arg = next(iter(sig.parameters.values())) return arg.kind not in (arg.VAR_POSITIONAL, arg.VAR_KEYWORD) def has_1pos_and_kwonly(func): sig = inspect.signature(func) kinds = Counter(p.kind for p in sig.parameters.values()) return kinds[P.POSITIONAL_ONLY] + kinds[P.POSITIONAL_OR_KEYWORD] == 1 \ and kinds[P.VAR_POSITIONAL] == 0 def get_argnames(func): func = getattr(func, '__original__', None) or unwrap(func) return func.__code__.co_varnames[:func.__code__.co_argcount] def arggetter(func, _cache={}): if func in _cache: return _cache[func] original = getattr(func, '__original__', None) or unwrap(func) argnames = get_argnames(original) indexes = dict((name, i) for i, name in enumerate(argnames)) defaults_tuple = original.__defaults__ if defaults_tuple: defaults = dict(zip(argnames[-len(defaults_tuple):], defaults_tuple)) else: defaults = {} def get_arg(name, args, kwargs): if name not 
in indexes: raise TypeError("%s() doesn't have argument named %s" % (func.__name__, name)) else: index = indexes[name] if index < len(args): return args[index] elif name in kwargs: return kwargs[name] elif name in defaults: return defaults[name] else: raise TypeError("%s() missing required argument: '%s'" % (func.__name__, name)) _cache[func] = get_arg return get_arg ### Backport python 3.4 contextlib utilities ### namely ContextDecorator and contextmanager (also producing decorator) if sys.version_info >= (3, 4): from contextlib import ContextDecorator, contextmanager else: class ContextDecorator(object): "A base class or mixin that enables context managers to work as decorators." def _recreate_cm(self): """Return a recreated instance of self. Allows an otherwise one-shot context manager like _GeneratorContextManager to support use as a decorator via implicit recreation. This is a private interface just for _GeneratorContextManager. See issue #11647 for details. """ return self def __call__(self, func): @wraps(func) def inner(*args, **kwds): with self._recreate_cm(): return func(*args, **kwds) return inner class _GeneratorContextManager(ContextDecorator): """Helper for @contextmanager decorator.""" def __init__(self, func, *args, **kwds): self.gen = func(*args, **kwds) self.func, self.args, self.kwds = func, args, kwds # Issue 19330: ensure context manager instances have good docstrings doc = getattr(func, "__doc__", None) if doc is None: doc = type(self).__doc__ self.__doc__ = doc # Unfortunately, this still doesn't provide good help output when # inspecting the created context manager instances, since pydoc # currently bypasses the instance docstring and shows the docstring # for the class instead. # See http://bugs.python.org/issue19404 for more details. 
def _recreate_cm(self): # _GCM instances are one-shot context managers, so the # CM must be recreated each time a decorated function is # called return self.__class__(self.func, *self.args, **self.kwds) def __enter__(self): try: return next(self.gen) except StopIteration: raise RuntimeError("generator didn't yield") def __exit__(self, type, value, traceback): if type is None: try: next(self.gen) except StopIteration: return else: raise RuntimeError("generator didn't stop") else: if value is None: # Need to force instantiation so we can reliably # tell if we get the same exception back value = type() try: self.gen.throw(type, value, traceback) raise RuntimeError("generator didn't stop after throw()") except StopIteration as exc: # Suppress the exception *unless* it's the same exception that # was passed to throw(). This prevents a StopIteration # raised inside the "with" statement from being suppressed return exc is not value except: # noqa # only re-raise if it's *not* the exception that was # passed to throw(), because __exit__() must not raise # an exception unless __exit__() itself failed. But throw() # has to raise the exception to signal propagation, so this # fixes the impedance mismatch between the throw() protocol # and the __exit__() protocol. # if sys.exc_info()[1] is not value: raise def contextmanager(func): """ A decorator helping to create context managers. Resulting functions also behave as decorators. A simple example:: @contextmanager def tag(name): print("<%s>" % name) yield print("" % name) with tag("h1"): print "foo", # ->

foo

Using as decorator:: @tag('strong') def shout(text): print(text.upper()) shout('hooray') # -> HOORAY """ @wraps(func) def helper(*args, **kwds): return _GeneratorContextManager(func, *args, **kwds) return helper ### Fix functools.wraps to make it safely work with callables without all the attributes ### We also add __original__ to it from functools import WRAPPER_ASSIGNMENTS, WRAPPER_UPDATES def update_wrapper(wrapper, wrapped, assigned = WRAPPER_ASSIGNMENTS, updated = WRAPPER_UPDATES): for attr in assigned: try: value = getattr(wrapped, attr) except AttributeError: pass else: setattr(wrapper, attr, value) for attr in updated: getattr(wrapper, attr).update(getattr(wrapped, attr, {})) # Set it after to not gobble it in __dict__ update wrapper.__wrapped__ = wrapped # Set an original ref for faster and more convenient access wrapper.__original__ = getattr(wrapped, '__original__', None) or unwrap(wrapped) # Return the wrapper so this can be used as a decorator via partial() return wrapper def wraps(wrapped, assigned = WRAPPER_ASSIGNMENTS, updated = WRAPPER_UPDATES): """ An utility to pass function metadata from wrapped function to a wrapper. Copies all function attributes including ``__name__``, ``__module__`` and ``__doc__``. In addition adds ``__wrapped__`` attribute referring to the wrapped function and ``__original__`` attribute referring to innermost wrapped one. Mostly used to create decorators:: def some_decorator(func): @wraps(func) def wrapper(*args, **kwargs): do_something(*args, **kwargs) return func(*args, **kwargs) return wrapper But see also :func:`@decorator` for that. This is extended version of :func:`functools.wraps`. """ return partial(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated) ### Backport of python 3.4 inspect.unwrap utility try: from inspect import unwrap except ImportError: # A simplified version, no stop keyword-only argument def unwrap(func): """ Get the object wrapped by ``func``. 
Follows the chain of :attr:`__wrapped__` attributes returning the last object in the chain. This is a backport from python 3.4. """ f = func # remember the original func for error reporting memo = set([id(f)]) # Memoise by id to tolerate non-hashable objects while hasattr(func, '__wrapped__'): func = func.__wrapped__ id_func = id(func) if id_func in memo: raise ValueError('wrapper loop when unwrapping {!r}'.format(f)) memo.add(id_func) return func funcy-1.17/funcy/flow.py000066400000000000000000000204611416001257100152330ustar00rootroot00000000000000from datetime import datetime, timedelta import time import threading from .compat import map, range, raise_from, Hashable from .decorators import decorator, wraps, get_argnames, arggetter, contextmanager __all__ = ['raiser', 'ignore', 'silent', 'suppress', 'nullcontext', 'reraise', 'retry', 'fallback', 'limit_error_rate', 'ErrorRateExceeded', 'throttle', 'post_processing', 'collecting', 'joining', 'once', 'once_per', 'once_per_args', 'wrap_with'] ### Error handling utilities def raiser(exception_or_class=Exception, *args, **kwargs): """Constructs function that raises the given exception with given arguments on any invocation.""" if isinstance(exception_or_class, str): exception_or_class = Exception(exception_or_class) def _raiser(*a, **kw): if args or kwargs: raise exception_or_class(*args, **kwargs) else: raise exception_or_class return _raiser # Not using @decorator here for speed, # since @ignore and @silent should be used for very simple and fast functions def ignore(errors, default=None): """Alters function to ignore given errors, returning default instead.""" errors = _ensure_exceptable(errors) def decorator(func): @wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except errors: return default return wrapper return decorator def silent(func): """Alters function to ignore all exceptions.""" return ignore(Exception)(func) ### Backport of Python 3.4 suppress try: from contextlib import 
suppress except ImportError: class suppress(object): """Context manager to suppress specified exceptions After the exception is suppressed, execution proceeds with the next statement following the with statement. """ def __init__(self, *exceptions): self._exceptions = exceptions def __enter__(self): pass def __exit__(self, exctype, excinst, exctb): # Unlike isinstance and issubclass, CPython exception handling # currently only looks at the concrete type hierarchy (ignoring # the instance and subclass checking hooks). While Guido considers # that a bug rather than a feature, it's a fairly hard one to fix # due to various internal implementation details. suppress provides # the simpler issubclass based semantics, rather than trying to # exactly reproduce the limitations of the CPython interpreter. # # See http://bugs.python.org/issue12029 for more details return exctype is not None and issubclass(exctype, self._exceptions) ### Backport of Python 3.7 nullcontext try: from contextlib import nullcontext except ImportError: class nullcontext(object): """Context manager that does no additional processing. Used as a stand-in for a normal context manager, when a particular block of code is only sometimes used with a normal context manager: cm = optional_cm if condition else nullcontext() with cm: # Perform operation, using optional_cm if condition is True """ def __init__(self, enter_result=None): self.enter_result = enter_result def __enter__(self): return self.enter_result def __exit__(self, *excinfo): pass @contextmanager def reraise(errors, into): """Reraises errors as other exception.""" errors = _ensure_exceptable(errors) try: yield except errors as e: if callable(into) and not _is_exception_type(into): into = into(e) raise_from(into, e) @decorator def retry(call, tries, errors=Exception, timeout=0, filter_errors=None): """Makes decorated function retry up to tries times. Retries only on specified errors. 
Sleeps timeout or timeout(attempt) seconds between tries.""" errors = _ensure_exceptable(errors) for attempt in range(tries): try: return call() except errors as e: if not (filter_errors is None or filter_errors(e)): raise # Reraise error on last attempt if attempt + 1 == tries: raise else: timeout_value = timeout(attempt) if callable(timeout) else timeout if timeout_value > 0: time.sleep(timeout_value) def fallback(*approaches): """Tries several approaches until one works. Each approach has a form of (callable, expected_errors).""" for approach in approaches: func, catch = (approach, Exception) if callable(approach) else approach catch = _ensure_exceptable(catch) try: return func() except catch: pass def _ensure_exceptable(errors): """Ensures that errors are passable to except clause. I.e. should be BaseException subclass or a tuple.""" return errors if _is_exception_type(errors) else tuple(errors) def _is_exception_type(value): return isinstance(value, type) and issubclass(value, BaseException) class ErrorRateExceeded(Exception): pass def limit_error_rate(fails, timeout, exception=ErrorRateExceeded): """If function fails to complete fails times in a row, calls to it will be intercepted for timeout with exception raised instead.""" if isinstance(timeout, int): timeout = timedelta(seconds=timeout) def decorator(func): @wraps(func) def wrapper(*args, **kwargs): if wrapper.blocked: if datetime.now() - wrapper.blocked < timeout: raise exception else: wrapper.blocked = None try: result = func(*args, **kwargs) except: # noqa wrapper.fails += 1 if wrapper.fails >= fails: wrapper.blocked = datetime.now() raise else: wrapper.fails = 0 return result wrapper.fails = 0 wrapper.blocked = None return wrapper return decorator def throttle(period): """Allows only one run in a period, the rest is skipped""" if isinstance(period, timedelta): period = period.total_seconds() def decorator(func): @wraps(func) def wrapper(*args, **kwargs): now = time.time() if wrapper.blocked_until and 
wrapper.blocked_until > now: return wrapper.blocked_until = now + period return func(*args, **kwargs) wrapper.blocked_until = None return wrapper return decorator ### Post processing decorators @decorator def post_processing(call, func): """Post processes decorated function result with func.""" return func(call()) collecting = post_processing(list) collecting.__name__ = 'collecting' collecting.__doc__ = "Transforms a generator into list returning function." @decorator def joining(call, sep): """Joins decorated function results with sep.""" return sep.join(map(sep.__class__, call())) ### Initialization helpers def once_per(*argnames): """Call function only once for every combination of the given arguments.""" def once(func): lock = threading.Lock() done_set = set() done_list = list() get_arg = arggetter(func) @wraps(func) def wrapper(*args, **kwargs): with lock: values = tuple(get_arg(name, args, kwargs) for name in argnames) if isinstance(values, Hashable): done, add = done_set, done_set.add else: done, add = done_list, done_list.append if values not in done: add(values) return func(*args, **kwargs) return wrapper return once once = once_per() once.__doc__ = "Let function execute once, noop all subsequent calls." 
def once_per_args(func): """Call function once for every combination of values of its arguments.""" return once_per(*get_argnames(func))(func) @decorator def wrap_with(call, ctx): """Turn context manager into a decorator""" with ctx: return call() funcy-1.17/funcy/funcmakers.py000066400000000000000000000016021416001257100164160ustar00rootroot00000000000000from operator import itemgetter from .compat import basestring, Mapping, Set from .strings import re_tester, re_finder, _re_type __all__ = ('make_func', 'make_pred') def make_func(f, builtin=False, test=False): if callable(f): return f elif f is None: # pass None to builtin as predicate or mapping function for speed return None if builtin else \ bool if test else lambda x: x elif isinstance(f, (basestring, _re_type)): return re_tester(f) if test else re_finder(f) elif isinstance(f, (int, slice)): return itemgetter(f) elif isinstance(f, Mapping): return f.__getitem__ elif isinstance(f, Set): return f.__contains__ else: raise TypeError("Can't make a func from %s" % f.__class__.__name__) def make_pred(pred, builtin=False): return make_func(pred, builtin=builtin, test=True) funcy-1.17/funcy/funcolls.py000066400000000000000000000014071416001257100161100ustar00rootroot00000000000000from .funcs import compose, juxt from .colls import some, none, one __all__ = ['all_fn', 'any_fn', 'none_fn', 'one_fn', 'some_fn'] def all_fn(*fs): """Constructs a predicate, which holds when all fs hold.""" return compose(all, juxt(*fs)) def any_fn(*fs): """Constructs a predicate, which holds when any fs holds.""" return compose(any, juxt(*fs)) def none_fn(*fs): """Constructs a predicate, which holds when none of fs hold.""" return compose(none, juxt(*fs)) def one_fn(*fs): """Constructs a predicate, which holds when exactly one of fs holds.""" return compose(one, juxt(*fs)) def some_fn(*fs): """Constructs a function, which calls fs one by one and returns first truthy result.""" return compose(some, juxt(*fs)) 
funcy-1.17/funcy/funcs.py000066400000000000000000000104211416001257100153750ustar00rootroot00000000000000from operator import __not__ from functools import partial, reduce from .compat import map from ._inspect import get_spec, Spec from .primitives import EMPTY from .funcmakers import make_func, make_pred __all__ = ['identity', 'constantly', 'caller', 'partial', 'rpartial', 'func_partial', 'curry', 'rcurry', 'autocurry', 'iffy', 'compose', 'rcompose', 'complement', 'juxt', 'ljuxt'] def identity(x): """Returns its argument.""" return x def constantly(x): """Creates a function accepting any args, but always returning x.""" return lambda *a, **kw: x # an operator.methodcaller() brother def caller(*a, **kw): """Creates a function calling its sole argument with given *a, **kw.""" return lambda f: f(*a, **kw) def func_partial(func, *args, **kwargs): """A functools.partial alternative, which returns a real function. Can be used to construct methods.""" return lambda *a, **kw: func(*(args + a), **dict(kwargs, **kw)) def rpartial(func, *args, **kwargs): """Partially applies last arguments. New keyworded arguments extend and override kwargs.""" return lambda *a, **kw: func(*(a + args), **dict(kwargs, **kw)) def curry(func, n=EMPTY): """Curries func into a chain of one argument functions.""" if n is EMPTY: n = get_spec(func).max_n if n <= 1: return func elif n == 2: return lambda x: lambda y: func(x, y) else: return lambda x: curry(partial(func, x), n - 1) def rcurry(func, n=EMPTY): """Curries func into a chain of one argument functions. 
Arguments are passed from right to left.""" if n is EMPTY: n = get_spec(func).max_n if n <= 1: return func elif n == 2: return lambda x: lambda y: func(y, x) else: return lambda x: rcurry(rpartial(func, x), n - 1) # TODO: drop `n` in next major release def autocurry(func, n=EMPTY, _spec=None, _args=(), _kwargs={}): """Creates a version of func returning its partial applications until sufficient arguments are passed.""" spec = _spec or (get_spec(func) if n is EMPTY else Spec(n, set(), n, set(), False)) def autocurried(*a, **kw): args = _args + a kwargs = _kwargs.copy() kwargs.update(kw) if not spec.kw and len(args) + len(kwargs) >= spec.max_n: return func(*args, **kwargs) elif len(args) + len(set(kwargs) & spec.names) >= spec.max_n: return func(*args, **kwargs) elif len(args) + len(set(kwargs) & spec.req_names) >= spec.req_n: try: return func(*args, **kwargs) except TypeError: return autocurry(func, _spec=spec, _args=args, _kwargs=kwargs) else: return autocurry(func, _spec=spec, _args=args, _kwargs=kwargs) return autocurried def iffy(pred, action=EMPTY, default=identity): """Creates a function, which conditionally applies action or default.""" if action is EMPTY: return iffy(bool, pred, default) else: pred = make_pred(pred) action = make_func(action) return lambda v: action(v) if pred(v) else \ default(v) if callable(default) else \ default def compose(*fs): """Composes passed functions.""" if fs: pair = lambda f, g: lambda *a, **kw: f(g(*a, **kw)) return reduce(pair, map(make_func, fs)) else: return identity def rcompose(*fs): """Composes functions, calling them from left to right.""" return compose(*reversed(fs)) def complement(pred): """Constructs a complementary predicate.""" return compose(__not__, pred) # NOTE: using lazy map in these two will result in empty list/iterator # from all calls to i?juxt result since map iterator will be depleted def ljuxt(*fs): """Constructs a juxtaposition of the given functions. 
Result returns a list of results of fs.""" extended_fs = list(map(make_func, fs)) return lambda *a, **kw: [f(*a, **kw) for f in extended_fs] def juxt(*fs): """Constructs a lazy juxtaposition of the given functions. Result returns an iterator of results of fs.""" extended_fs = list(map(make_func, fs)) return lambda *a, **kw: (f(*a, **kw) for f in extended_fs) funcy-1.17/funcy/objects.py000066400000000000000000000070401416001257100157130ustar00rootroot00000000000000from inspect import isclass, ismodule from .compat import PY2 from .colls import walk_values from .funcs import iffy from .strings import cut_prefix __all__ = ['cached_property', 'cached_readonly', 'wrap_prop', 'monkey', 'namespace', 'LazyObject'] class cached_property(object): """ Decorator that converts a method with a single self argument into a property cached on the instance. """ # NOTE: implementation borrowed from Django. # NOTE: we use fget, fset and fdel attributes to mimic @property. fset = fdel = None def __init__(self, fget): self.fget = fget self.__doc__ = getattr(fget, '__doc__') def __get__(self, instance, type=None): if instance is None: return self res = instance.__dict__[self.fget.__name__] = self.fget(instance) return res class cached_readonly(cached_property): """Same as @cached_property, but protected against rewrites.""" def __set__(self, instance, value): raise AttributeError("property is read-only") def wrap_prop(ctx): """Wrap a property accessors with a context manager""" def decorator(prop): class WrapperProp(object): def __repr__(self): return repr(prop) def __get__(self, instance, type=None): if instance is None: return self with ctx: return prop.__get__(instance, type) if hasattr(prop, '__set__'): def __set__(self, name, value): with ctx: return prop.__set__(name, value) if hasattr(prop, '__del__'): def __del__(self, name): with ctx: return prop.__del__(name) return WrapperProp() return decorator def monkey(cls, name=None): """ Monkey patches class or module by adding to it 
decorated function. Anything overwritten could be accessed via .original attribute of decorated object. """ assert isclass(cls) or ismodule(cls), "Attempting to monkey patch non-class and non-module" def decorator(value): func = getattr(value, 'fget', value) # Support properties func_name = name or cut_prefix(func.__name__, '%s__' % cls.__name__) func.__name__ = func_name func.original = getattr(cls, func_name, None) setattr(cls, func_name, value) return value return decorator # TODO: monkey_mix()? class namespace_meta(type): def __new__(cls, name, bases, attrs): attrs = walk_values(iffy(callable, staticmethod), attrs) return super(namespace_meta, cls).__new__(cls, name, bases, attrs) class namespace(object): """A base class that prevents its member functions turning into methods.""" if PY2: __metaclass__ = namespace_meta class LazyObject(object): """ A simplistic lazy init object. Rewrites itself when any attribute is accessed. """ # NOTE: we can add lots of magic methods here to intercept on more events, # this is postponed. As well as metaclass to support isinstance() check. 
def __init__(self, init): self.__dict__['_init'] = init def _setup(self): obj = self._init() object.__setattr__(self, '__class__', obj.__class__) object.__setattr__(self, '__dict__', obj.__dict__) def __getattr__(self, name): self._setup() return getattr(self, name) def __setattr__(self, name, value): self._setup() return setattr(self, name, value) funcy-1.17/funcy/primitives.py000066400000000000000000000006301416001257100164530ustar00rootroot00000000000000__all__ = ['isnone', 'notnone', 'inc', 'dec', 'even', 'odd'] class EmptyType: def __repr__(self): return 'EMPTY' EMPTY = EmptyType() # Used as unique default for optional arguments def isnone(x): return x is None def notnone(x): return x is not None def inc(x): return x + 1 def dec(x): return x - 1 def even(x): return x % 2 == 0 def odd(x): return x % 2 == 1 funcy-1.17/funcy/py2.py000066400000000000000000000033121416001257100147720ustar00rootroot00000000000000""" Rewrite function names to represent Python 2 list-by-default interface. Iterator versions go with i prefix. """ import sys from . import py3 from .py3 import * # noqa from .py3 import __all__ from .compat import zip as izip # noqa, reexport # NOTE: manually renaming these to make PyCharm happy. # Not renaming lversions manually to not shade original definition. # Why it's shaded by rename? PyCharm only knows... 
from .py3 import (map as imap, filter as ifilter, remove as iremove, keep as ikeep, # noqa without as iwithout, concat as iconcat, cat as icat, flatten as iflatten, mapcat as imapcat, distinct as idistinct, split as isplit, split_at as isplit_at, split_by as isplit_by, partition as ipartition, chunks as ichunks, partition_by as ipartition_by, reductions as ireductions, sums as isums, juxt as ijuxt, tree_leaves as itree_leaves, tree_nodes as itree_nodes, where as iwhere, pluck as ipluck, pluck_attr as ipluck_attr, linvoke as invoke) RENAMES = {} for name in ('map', 'filter', 'remove', 'keep', 'without', 'concat', 'cat', 'flatten', 'mapcat', 'distinct', 'split', 'split_at', 'split_by', 'partition', 'chunks', 'partition_by', 'reductions', 'sums', 'juxt', 'tree_leaves', 'tree_nodes', 'where', 'pluck', 'pluck_attr', 'invoke'): RENAMES['l' + name] = name RENAMES[name] = 'i' + name RENAMES['zip_values'] = 'izip_values' RENAMES['zip_dicts'] = 'izip_dicts' # HACK: list concat instead of .append() to not trigger PyCharm __all__ = [RENAMES.get(name, name) for name in __all__ if name != 'lzip'] + ['izip'] py2 = sys.modules[__name__] for old, new in RENAMES.items(): setattr(py2, new, getattr(py3, old)) funcy-1.17/funcy/py3.py000066400000000000000000000014351416001257100147770ustar00rootroot00000000000000import sys from .calc import * from .colls import * from .tree import * from .decorators import * from .funcolls import * from .funcs import * from .seqs import * from .types import * from .strings import * from .flow import * from .objects import * from .debug import * from .primitives import * # Setup __all__ modules = ('calc', 'colls', 'tree', 'decorators', 'funcolls', 'funcs', 'seqs', 'types', 'strings', 'flow', 'objects', 'debug', 'primitives') __all__ = lcat(sys.modules['funcy.' 
+ m].__all__ for m in modules) # Python 2 style zip() for Python 3 from .compat import PY3 if PY3: def lzip(*seqs): """List zip() version.""" return list(zip(*seqs)) else: lzip = zip __all__ += ['lzip'] # HACK: using this instead of .append() to not trigger PyCharm funcy-1.17/funcy/seqs.py000066400000000000000000000347071416001257100152470ustar00rootroot00000000000000from itertools import islice, chain, tee, groupby, \ takewhile as _takewhile, dropwhile as _dropwhile from collections import defaultdict, deque import operator from .compat import map as _map, filter as _filter, lmap as _lmap, lfilter as _lfilter, \ zip, filterfalse, range, Sequence, PY2, PY3 from .primitives import EMPTY from .types import is_seqcont from .funcmakers import make_func, make_pred __all__ = [ 'count', 'cycle', 'repeat', 'repeatedly', 'iterate', 'take', 'drop', 'first', 'second', 'nth', 'last', 'rest', 'butlast', 'ilen', 'map', 'filter', 'lmap', 'lfilter', 'remove', 'lremove', 'keep', 'lkeep', 'without', 'lwithout', 'concat', 'lconcat', 'chain', 'cat', 'lcat', 'flatten', 'lflatten', 'mapcat', 'lmapcat', 'interleave', 'interpose', 'distinct', 'ldistinct', 'dropwhile', 'takewhile', 'split', 'lsplit', 'split_at', 'lsplit_at', 'split_by', 'lsplit_by', 'group_by', 'group_by_keys', 'group_values', 'count_by', 'count_reps', 'partition', 'lpartition', 'chunks', 'lchunks', 'partition_by', 'lpartition_by', 'with_prev', 'with_next', 'pairwise', 'reductions', 'lreductions', 'sums', 'lsums', 'accumulate', ] # Re-export from itertools import count, cycle, repeat def repeatedly(f, n=EMPTY): """Takes a function of no args, presumably with side effects, and returns an infinite (or length n) iterator of calls to it.""" _repeat = repeat(None) if n is EMPTY else repeat(None, n) return (f() for _ in _repeat) def iterate(f, x): """Returns an infinite iterator of `x, f(x), f(f(x)), ...`""" while True: yield x x = f(x) def take(n, seq): """Returns a list of first n items in the sequence, or all items if there 
are fewer than n.""" return list(islice(seq, n)) def drop(n, seq): """Skips first n items in the sequence, yields the rest.""" return islice(seq, n, None) def first(seq): """Returns the first item in the sequence. Returns None if the sequence is empty.""" return next(iter(seq), None) def second(seq): """Returns second item in the sequence. Returns None if there are less than two items in it.""" return first(rest(seq)) def nth(n, seq): """Returns nth item in the sequence or None if no such item exists.""" try: return seq[n] except IndexError: return None except TypeError: return next(islice(seq, n, None), None) def last(seq): """Returns the last item in the sequence or iterator. Returns None if the sequence is empty.""" try: return seq[-1] except IndexError: return None except TypeError: item = None for item in seq: pass return item def rest(seq): """Skips first item in the sequence, yields the rest.""" return drop(1, seq) def butlast(seq): """Iterates over all elements of the sequence but last.""" it = iter(seq) try: prev = next(it) except StopIteration: pass else: for item in it: yield prev prev = item def ilen(seq): """Consumes an iterable not reading it into memory and returns the number of items.""" # NOTE: implementation borrowed from http://stackoverflow.com/a/15112059/753382 counter = count() deque(zip(seq, counter), maxlen=0) # (consume at C speed) return next(counter) # TODO: tree-seq equivalent def lmap(f, *seqs): """An extended version of builtin map() returning a list. Derives a mapper from string, int, slice, dict or set.""" return _lmap(make_func(f, builtin=PY2), *seqs) def lfilter(pred, seq): """An extended version of builtin filter() returning a list. Derives a predicate from string, int, slice, dict or set.""" return _lfilter(make_pred(pred, builtin=PY2), seq) def map(f, *seqs): """An extended version of builtin map(). 
Derives a mapper from string, int, slice, dict or set.""" return _map(make_func(f, builtin=PY2), *seqs) def filter(pred, seq): """An extended version of builtin filter(). Derives a predicate from string, int, slice, dict or set.""" return _filter(make_pred(pred, builtin=PY2), seq) if PY2: # NOTE: Default imap() behaves strange when passed None as function, # returns 1-length tuples, which is inconvinient and incompatible with map(). # This version is more sane: map() compatible and suitable for our internal use. def xmap(f, *seqs): return _map(make_func(f), *seqs) else: xmap = map # This is already extended version from above def lremove(pred, seq): """Creates a list if items passing given predicate.""" return list(remove(pred, seq)) def remove(pred, seq): """Iterates items passing given predicate.""" return filterfalse(make_pred(pred, builtin=PY2), seq) def lkeep(f, seq=EMPTY): """Maps seq with f and keeps only truthy results. Simply lists truthy values in one argument version.""" return list(keep(f, seq)) def keep(f, seq=EMPTY): """Maps seq with f and iterates truthy results. Simply iterates truthy values in one argument version.""" if seq is EMPTY: return _filter(bool, f) else: return _filter(bool, xmap(f, seq)) def without(seq, *items): """Iterates over sequence skipping items.""" for value in seq: if value not in items: yield value def lwithout(seq, *items): """Removes items from sequence, preserves order.""" return list(without(seq, *items)) def lconcat(*seqs): """Concatenates several sequences.""" return list(chain(*seqs)) concat = chain def lcat(seqs): """Concatenates the sequence of sequences.""" return list(cat(seqs)) cat = chain.from_iterable def flatten(seq, follow=is_seqcont): """Flattens arbitrary nested sequence. 
Unpacks an item if follow(item) is truthy.""" for item in seq: if follow(item): # TODO: use `yield from` when Python 2 is dropped ;) for sub in flatten(item, follow): yield sub else: yield item def lflatten(seq, follow=is_seqcont): """Iterates over arbitrary nested sequence. Dives into when follow(item) is truthy.""" return list(flatten(seq, follow)) def lmapcat(f, *seqs): """Maps given sequence(s) and concatenates the results.""" return lcat(xmap(f, *seqs)) def mapcat(f, *seqs): """Maps given sequence(s) and chains the results.""" return cat(xmap(f, *seqs)) def interleave(*seqs): """Yields first item of each sequence, then second one and so on.""" return cat(zip(*seqs)) def interpose(sep, seq): """Yields items of the sequence alternating with sep.""" return drop(1, interleave(repeat(sep), seq)) def takewhile(pred, seq=EMPTY): """Yields sequence items until first predicate fail. Stops on first falsy value in one argument version.""" if seq is EMPTY: pred, seq = bool, pred else: pred = make_pred(pred) return _takewhile(pred, seq) def dropwhile(pred, seq=EMPTY): """Skips the start of the sequence passing pred (or just truthy), then iterates over the rest.""" if seq is EMPTY: pred, seq = bool, pred else: pred = make_pred(pred) return _dropwhile(pred, seq) def ldistinct(seq, key=EMPTY): """Removes duplicates from sequences, preserves order.""" return list(distinct(seq, key)) def distinct(seq, key=EMPTY): """Iterates over sequence skipping duplicates""" seen = set() # check if key is supplied out of loop for efficiency if key is EMPTY: for item in seq: if item not in seen: seen.add(item) yield item else: key = make_func(key) for item in seq: k = key(item) if k not in seen: seen.add(k) yield item def split(pred, seq): """Lazily splits items which pass the predicate from the ones that don't. 
Returns a pair (passed, failed) of respective iterators.""" pred = make_pred(pred) yes, no = deque(), deque() splitter = (yes.append(item) if pred(item) else no.append(item) for item in seq) def _split(q): while True: while q: yield q.popleft() try: next(splitter) except StopIteration: return return _split(yes), _split(no) def lsplit(pred, seq): """Splits items which pass the predicate from the ones that don't. Returns a pair (passed, failed) of respective lists.""" pred = make_pred(pred) yes, no = [], [] for item in seq: if pred(item): yes.append(item) else: no.append(item) return yes, no def split_at(n, seq): """Lazily splits the sequence at given position, returning a pair of iterators over its start and tail.""" a, b = tee(seq) return islice(a, n), islice(b, n, None) def lsplit_at(n, seq): """Splits the sequence at given position, returning a tuple of its start and tail.""" a, b = split_at(n, seq) return list(a), list(b) def split_by(pred, seq): """Lazily splits the start of the sequence, consisting of items passing pred, from the rest of it.""" a, b = tee(seq) return takewhile(pred, a), dropwhile(pred, b) def lsplit_by(pred, seq): """Splits the start of the sequence, consisting of items passing pred, from the rest of it.""" a, b = split_by(pred, seq) return list(a), list(b) def group_by(f, seq): """Groups given sequence items into a mapping f(item) -> [item, ...].""" f = make_func(f) result = defaultdict(list) for item in seq: result[f(item)].append(item) return result def group_by_keys(get_keys, seq): """Groups items having multiple keys into a mapping key -> [item, ...]. 
Item might be repeated under several keys.""" get_keys = make_func(get_keys) result = defaultdict(list) for item in seq: for k in get_keys(item): result[k].append(item) return result def group_values(seq): """Takes a sequence of (key, value) pairs and groups values by keys.""" result = defaultdict(list) for key, value in seq: result[key].append(value) return result def count_by(f, seq): """Counts numbers of occurrences of values of f() on elements of given sequence.""" f = make_func(f) result = defaultdict(int) for item in seq: result[f(item)] += 1 return result def count_reps(seq): """Counts number occurrences of each value in the sequence.""" result = defaultdict(int) for item in seq: result[item] += 1 return result # For efficiency we use separate implementation for cutting sequences (those capable of slicing) def _cut_seq(drop_tail, n, step, seq): limit = len(seq)-n+1 if drop_tail else len(seq) return (seq[i:i+n] for i in range(0, limit, step)) def _cut_iter(drop_tail, n, step, seq): it = iter(seq) pool = take(n, it) while True: if len(pool) < n: break yield pool pool = pool[step:] pool.extend(islice(it, step)) if not drop_tail: for item in _cut_seq(drop_tail, n, step, pool): yield item def _cut(drop_tail, n, step, seq=EMPTY): if seq is EMPTY: step, seq = n, step # NOTE: range() is capable of slicing in python 3, if isinstance(seq, Sequence) and (PY3 or not isinstance(seq, range)): return _cut_seq(drop_tail, n, step, seq) else: return _cut_iter(drop_tail, n, step, seq) def partition(n, step, seq=EMPTY): """Lazily partitions seq into parts of length n. Skips step items between parts if passed. Non-fitting tail is ignored.""" return _cut(True, n, step, seq) def lpartition(n, step, seq=EMPTY): """Partitions seq into parts of length n. Skips step items between parts if passed. Non-fitting tail is ignored.""" return list(partition(n, step, seq)) def chunks(n, step, seq=EMPTY): """Lazily chunks seq into parts of length n or less. 
def with_next(seq, fill=None):
    """Yields each item paired with its following: (item, next)."""
    items, successors = tee(seq)
    next(successors, None)  # shift the second copy one step forward
    padded = chain(successors, [fill])  # fill stands in for the missing last "next"
    return zip(items, padded)
return reductions(operator.add, seq, acc) def lsums(seq, acc=EMPTY): """Lists partial sums of seq.""" return lreductions(operator.add, seq, acc) funcy-1.17/funcy/strings.py000066400000000000000000000046061416001257100157600ustar00rootroot00000000000000import re from operator import methodcaller from .compat import map from .primitives import EMPTY __all__ = ['re_iter', 're_all', 're_find', 're_finder', 're_test', 're_tester', 'str_join', 'cut_prefix', 'cut_suffix'] def _make_getter(regex): if regex.groups == 0: return methodcaller('group') elif regex.groups == 1 and regex.groupindex == {}: return methodcaller('group', 1) elif regex.groupindex == {}: return methodcaller('groups') elif regex.groups == len(regex.groupindex): return methodcaller('groupdict') else: return lambda m: m _re_type = type(re.compile(r'')) def _prepare(regex, flags): if not isinstance(regex, _re_type): regex = re.compile(regex, flags) return regex, _make_getter(regex) def re_iter(regex, s, flags=0): """Iterates over matches of regex in s, presents them in simplest possible form""" regex, getter = _prepare(regex, flags) return map(getter, regex.finditer(s)) def re_all(regex, s, flags=0): """Lists all matches of regex in s, presents them in simplest possible form""" return list(re_iter(regex, s, flags)) def re_find(regex, s, flags=0): """Matches regex against the given string, returns the match in the simplest possible form.""" return re_finder(regex, flags)(s) def re_test(regex, s, flags=0): """Tests whether regex matches against s.""" return re_tester(regex, flags)(s) def re_finder(regex, flags=0): """Creates a function finding regex in passed string.""" regex, _getter = _prepare(regex, flags) getter = lambda m: _getter(m) if m else None return lambda s: getter(regex.search(s)) def re_tester(regex, flags=0): """Creates a predicate testing passed string with regex.""" if not isinstance(regex, _re_type): regex = re.compile(regex, flags) return lambda s: bool(regex.search(s)) def str_join(sep, 
def cut_suffix(s, suffix):
    """Cuts suffix from given string if it's present.

    Returns s unchanged when the suffix is absent or empty."""
    # Guard against an empty suffix: s.endswith('') is always True and
    # s[:-0] == s[:0] == '', which would wrongly wipe the whole string.
    if suffix and s.endswith(suffix):
        return s[:-len(suffix)]
    return s
""" return lambda x: isinstance(x, types) is_mapping = isa(Mapping) is_set = isa(Set) is_seq = isa(Sequence) is_list = isa(list) is_tuple = isa(tuple) is_seqcoll = isa(list, tuple) is_seqcont = isa(list, tuple, Iterator, range) iterable = isa(Iterable) is_iter = isa(Iterator) funcy-1.17/setup.cfg000066400000000000000000000000771416001257100144100ustar00rootroot00000000000000[bdist_wheel] universal = 1 [metadata] license_file = LICENSE funcy-1.17/setup.py000066400000000000000000000027361416001257100143050ustar00rootroot00000000000000from setuptools import setup # Remove build status and move Gitter link under title for PyPi README = open('README.rst').read() \ .replace('|Build Status|', '', 1) setup( name='funcy', version=open('VERSION').read().strip(), author='Alexander Schepanovski', author_email='suor.web@gmail.com', description='A fancy and practical functional tools', long_description=README, long_description_content_type="text/x-rst", url='http://github.com/Suor/funcy', license='BSD', packages=['funcy'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Libraries :: Python Modules', 'Intended Audience :: Developers', ] ) funcy-1.17/test_requirements.txt000066400000000000000000000002431416001257100171050ustar00rootroot00000000000000pytest==6.2.5; python_version>='3.6' pytest==3.9.3; 
python_version<='3.5' more-itertools==4.0.0; python_version=='3.5' whatever==0.6 typing; python_version=='3.4' funcy-1.17/tests/000077500000000000000000000000001416001257100137255ustar00rootroot00000000000000funcy-1.17/tests/__init__.py000066400000000000000000000000001416001257100160240ustar00rootroot00000000000000funcy-1.17/tests/test_calc.py000066400000000000000000000071721416001257100162470ustar00rootroot00000000000000from math import sin, cos from datetime import timedelta import pytest from funcy.calc import * def test_memoize(): @memoize def inc(x): calls.append(x) return x + 1 calls = [] assert inc(0) == 1 assert inc(1) == 2 assert inc(0) == 1 assert calls == [0, 1] # using kwargs assert inc(x=0) == 1 assert inc(x=1) == 2 assert inc(x=0) == 1 assert calls == [0, 1, 0, 1] def test_memoize_args_kwargs(): @memoize def mul(x, by=1): calls.append((x, by)) return x * by calls = [] assert mul(0) == 0 assert mul(1) == 1 assert mul(0) == 0 assert calls == [(0, 1), (1, 1)] # more with kwargs assert mul(0, 1) == 0 assert mul(1, 1) == 1 assert mul(0, 1) == 0 assert calls == [(0, 1), (1, 1), (0, 1), (1, 1)] def test_memoize_memory(): @memoize def inc(x): calls.append(x) return x + 1 calls = [] inc(0) inc.memory.clear() inc(0) assert calls == [0, 0] def test_memoize_key_func(): @memoize(key_func=len) def inc(s): calls.append(s) return s * 2 calls = [] assert inc('a') == 'aa' assert inc('b') == 'aa' inc('ab') assert calls == ['a', 'ab'] def test_make_lookuper(): @make_lookuper def letter_index(): return ((c, i) for i, c in enumerate('abcdefghij')) assert letter_index('c') == 2 with pytest.raises(LookupError): letter_index('_') def test_make_lookuper_nested(): tables_built = [0] @make_lookuper def function_table(f): tables_built[0] += 1 return ((x, f(x)) for x in range(10)) assert function_table(sin)(5) == sin(5) assert function_table(cos)(3) == cos(3) assert function_table(sin)(3) == sin(3) assert tables_built[0] == 2 with pytest.raises(LookupError): 
function_table(cos)(-1) def test_silent_lookuper(): @silent_lookuper def letter_index(): return ((c, i) for i, c in enumerate('abcdefghij')) assert letter_index('c') == 2 assert letter_index('_') is None def test_silnent_lookuper_nested(): @silent_lookuper def function_table(f): return ((x, f(x)) for x in range(10)) assert function_table(sin)(5) == sin(5) assert function_table(cos)(-1) is None @pytest.mark.parametrize('typ', [pytest.param(int, id='int'), pytest.param(lambda s: timedelta(seconds=s), id='timedelta')]) def test_cache(typ): calls = [] @cache(timeout=typ(60)) def inc(x): calls.append(x) return x + 1 assert inc(0) == 1 assert inc(1) == 2 assert inc(0) == 1 assert calls == [0, 1] def test_cache_mixed_args(): @cache(timeout=60) def add(x, y): return x + y assert add(1, y=2) == 3 def test_cache_timedout(): calls = [] @cache(timeout=0) def inc(x): calls.append(x) return x + 1 assert inc(0) == 1 assert inc(1) == 2 assert inc(0) == 1 assert calls == [0, 1, 0] assert len(inc.memory) == 1 # Both call should be erased then one added def test_cache_invalidate(): calls = [] @cache(timeout=60) def inc(x): calls.append(x) return x + 1 assert inc(0) == 1 assert inc(1) == 2 assert inc(0) == 1 assert calls == [0, 1] inc.invalidate_all() assert inc(0) == 1 assert inc(1) == 2 assert inc(0) == 1 assert calls == [0, 1, 0, 1] inc.invalidate(1) assert inc(0) == 1 assert inc(1) == 2 assert inc(0) == 1 assert calls == [0, 1, 0, 1, 1] # ensure invalidate() is idempotent (doesn't raise KeyError on the 2nd call) inc.invalidate(0) inc.invalidate(0) funcy-1.17/tests/test_colls.py000066400000000000000000000226501416001257100164570ustar00rootroot00000000000000import pytest from itertools import chain, count from collections import defaultdict, namedtuple from whatever import _ from funcy.compat import Iterator from funcy.colls import * # Utilities def eq(a, b): return type(a) is type(b) and a == b \ and (a.default_factory == b.default_factory if isinstance(a, defaultdict) else True) 
def eq(a, b):
    """Strict equality check for tests: same concrete type, equal value,
    and for defaultdicts also matching default factories."""
    if type(a) is not type(b) or a != b:
        return False
    if isinstance(a, defaultdict):
        return a.default_factory == b.default_factory
    return True
== {1: 11, 2: 2} # Also works for collection of pairs assert merge_with(sum, {1: 1}, {1: 10, 2: 2}.items()) == {1: 11, 2: 2} def test_join_with(): assert join_with(sum, ({n % 3: n} for n in range(5))) == {0: 3, 1: 5, 2: 2} def test_walk(): assert eq(walk(inc, [1,2,3]), [2,3,4]) assert eq(walk(inc, (1,2,3)), (2,3,4)) assert eq(walk(inc, {1,2,3}), {2,3,4}) assert eq(walk(hinc, {1:1,2:2,3:3}), {2:2,3:3,4:4}) def test_walk_iter(): it = walk(inc, chain([0], [1, 2])) assert isinstance(it, Iterator) and list(it) == [1,2,3] it = walk(inc, (i for i in [0,1,2])) assert isinstance(it, Iterator) and list(it) == [1,2,3] def test_walk_extended(): assert walk(None, {2, 3}) == {2, 3} assert walk(r'\d+', {'a2', '13b'}) == {'2', '13'} assert walk({'a': '1', 'b': '2'}, 'ab') == '12' assert walk({1, 2, 3}, (0, 1, 2)) == (False, True, True) def test_walk_keys(): assert walk_keys(str.upper, {'a': 1, 'b':2}) == {'A': 1, 'B': 2} assert walk_keys(r'\d', {'a1': 1, 'b2': 2}) == {'1': 1, '2': 2} def test_walk_values(): assert walk_values(_ * 2, {'a': 1, 'b': 2}) == {'a': 2, 'b': 4} assert walk_values(r'\d', {1: 'a1', 2: 'b2'}) == {1: '1', 2: '2'} def test_walk_values_defaultdict(): dd = defaultdict(lambda: 'hey', {1: 'a', 2: 'ab'}) walked_dd = walk_values(len, dd) assert walked_dd == {1: 1, 2: 2} # resulting default factory should be compose(len, lambda: 'hey') assert walked_dd[0] == 3 def test_select(): assert eq(select(_ > 1, [1,2,3]), [2,3]) assert eq(select(_ > 1, (1,2,3)), (2,3)) assert eq(select(_ > 1, {1,2,3}), {2,3}) assert eq(select(_[1] > 1, {'a':1,'b':2,'c':3}), {'b':2,'c':3}) assert select(_[1] > 1, defaultdict(int)) == {} def test_select_extended(): assert select(None, [2, 3, 0]) == [2, 3] assert select(r'\d', 'a23bn45') == '2345' assert select({1,2,3}, (0, 1, 2, 4, 1)) == (1, 2, 1) def test_select_keys(): assert select_keys(_[0] == 'a', {'a':1, 'b':2, 'ab':3}) == {'a': 1, 'ab':3} assert select_keys(r'^a', {'a':1, 'b':2, 'ab':3, 'ba': 4}) == {'a': 1, 'ab':3} def 
test_select_values(): assert select_values(_ % 2, {'a': 1, 'b': 2}) == {'a': 1} assert select_values(r'a', {1: 'a', 2: 'b'}) == {1: 'a'} def test_compact(): assert eq(compact([0, 1, None, 3]), [1, 3]) assert eq(compact((0, 1, None, 3)), (1, 3)) assert eq(compact({'a': None, 'b': 0, 'c': 1}), {'c': 1}) def test_is_distinct(): assert is_distinct('abc') assert not is_distinct('aba') assert is_distinct(['a', 'ab', 'abc'], key=len) assert not is_distinct(['ab', 'cb', 'ad'], key=0) def test_all(): assert all([1,2,3]) assert not all([1,2,'']) assert all(callable, [abs, open, int]) assert not all(_ < 3, [1,2,5]) def test_all_extended(): assert all(None, [1,2,3]) assert not all(None, [1,2,'']) assert all(r'\d', '125') assert not all(r'\d', '12.5') def test_any(): assert any([0, False, 3, '']) assert not any([0, False, '']) assert any(_ > 0, [1,2,0]) assert not any(_ < 0, [1,2,0]) def test_one(): assert one([0, False, 3, '']) assert not one([0, False, '']) assert not one([1, False, 'a']) assert one(_ > 0, [0,1]) assert not one(_ < 0, [0,1,2]) assert not one(_ > 0, [0,1,2]) def test_none(): assert none([0, False]) assert not none(_ < 0, [0, -1]) def test_some(): assert some([0, '', 2, 3]) == 2 assert some(_ > 3, range(10)) == 4 def test_zipdict(): assert zipdict([1, 2], 'ab') == {1: 'a', 2:'b'} assert zipdict('ab', count()) == {'a': 0, 'b': 1} def test_flip(): assert flip({'a':1, 'b':2}) == {1:'a', 2:'b'} def test_project(): assert project({'a':1, 'b':2, 'c': 3}, 'ac') == {'a':1, 'c': 3} dd = defaultdict(int, {'a':1, 'b':2, 'c': 3}) assert eq(project(dd, 'ac'), defaultdict(int, {'a':1, 'c': 3})) def test_omit(): assert omit({'a': 1, 'b': 2, 'c': 3}, 'ac') == {'b': 2} dd = defaultdict(int, {'a': 1, 'b': 2, 'c': 3}) assert eq(omit(dd, 'ac'), defaultdict(int, {'b': 2})) def test_zip_values(): assert list(zip_values({1: 10}, {1: 20, 2: 30})) == [(10, 20)] with pytest.raises(TypeError): list(zip_values()) def test_zip_dicts(): assert list(zip_dicts({1: 10}, {1: 20, 2: 30})) == 
[(1, (10, 20))] with pytest.raises(TypeError): list(zip_dicts()) def test_get_in(): d = { "a": { "b": "c", "d": "e", "f": { "g": "h" } }, "i": "j" } assert get_in(d, ["m"]) is None assert get_in(d, ["m", "n"], "foo") == "foo" assert get_in(d, ["i"]) == "j" assert get_in(d, ["a", "b"]) == "c" assert get_in(d, ["a", "f", "g"]) == "h" def test_get_in_list(): assert get_in([1, 2], [0]) == 1 assert get_in([1, 2], [3]) is None assert get_in({'x': [1, 2]}, ['x', 1]) == 2 def test_set_in(): d = { 'a': { 'b': 1, 'c': 2, }, 'd': 5 } d2 = set_in(d, ['a', 'c'], 7) assert d['a']['c'] == 2 assert d2['a']['c'] == 7 d3 = set_in(d, ['e', 'f'], 42) assert d3['e'] == {'f': 42} assert d3['a'] is d['a'] def test_set_in_list(): l = [{}, 1] l2 = set_in(l, [1], 7) assert l2 == [{}, 7] assert l2[0] is l[0] def test_update_in(): d = {'c': []} assert update_in(d, ['c'], len) == {'c': 0} d2 = update_in(d, ['a', 'b'], inc, default=0) assert d2['a']['b'] == 1 assert d2['c'] is d['c'] def test_del_in(): d = {'c': [1, 2, 3]} assert del_in(d, []) is d assert del_in(d, ['a', 'b']) is d assert del_in(d, ['c', 1]) == {'c': [1, 3]} with pytest.raises(TypeError): del_in(d, ['c', 'b']) def test_has_path(): d = { "a": { "b": "c", "d": "e", "f": { "g": "h" } }, "i": "j" } assert has_path(d, []) assert not has_path(d, ["m"]) assert not has_path(d, ["m", "n"]) assert has_path(d, ("i",)) assert has_path(d, ("a", "b")) assert has_path(d, ["a", "f", "g"]) def test_has_path_list(): assert has_path([1, 2], [0]) assert not has_path([1, 2], [3]) assert has_path({'x': [1, 2]}, ['x', 1]) def test_where(): data = [{'a': 1, 'b': 2}, {'a': 10, 'b': 2}] assert isinstance(where(data, a=1), Iterator) assert list(where(data, a=1)) == [{'a': 1, 'b': 2}] def test_lwhere(): data = [{'a': 1, 'b': 2}, {'a': 10, 'b': 2}] assert lwhere(data, a=1, b=2) == [{'a': 1, 'b': 2}] assert lwhere(data, b=2) == data # Test non-existent key assert lwhere(data, c=1) == [] def test_pluck(): data = [{'a': 1, 'b': 2}, {'a': 10, 'b': 2}] assert 
lpluck('a', data) == [1, 10] def test_pluck_attr(): TestObj = namedtuple('TestObj', ('id', 'name')) objs = [TestObj(1, 'test1'), TestObj(5, 'test2'), TestObj(10, 'test3')] assert lpluck_attr('id', objs) == [1, 5, 10] def test_invoke(): assert linvoke(['abc', 'def', 'b'], 'find', 'b') == [1, -1, 0] funcy-1.17/tests/test_debug.py000066400000000000000000000073051416001257100164310ustar00rootroot00000000000000import re from funcy.debug import * from funcy.flow import silent from funcy.py3 import lmap def test_tap(): assert capture(tap, 42) == '42\n' assert capture(tap, 42, label='Life and ...') == 'Life and ...: 42\n' def test_log_calls(): log = [] @log_calls(log.append) def f(x, y): return x + y f(1, 2) f('a', 'b') assert log == [ "Call f(1, 2)", "-> 3 from f(1, 2)", "Call f('a', 'b')", "-> 'ab' from f('a', 'b')", ] def test_print_calls(): def f(x, y): return x + y capture(print_calls(f), 1, 2) == "Call f(1, 2)\n-> 3 from f(1, 2)\n", capture(print_calls()(f), 1, 2) == "Call f(1, 2)\n-> 3 from f(1, 2)\n", def test_log_calls_raise(): log = [] @log_calls(log.append, stack=False) def f(): raise Exception('something bad') silent(f)() assert log == [ "Call f()", "-> Exception: something bad raised in f()", ] def test_log_errors(): log = [] @log_errors(log.append) def f(x): return 1 / x silent(f)(1) silent(f)(0) assert len(log) == 1 assert log[0].startswith('Traceback') assert re.search(r'ZeroDivisionError: .*\n raised in f\(0\)$', log[0]) def test_log_errors_manager(): log = [] try: with log_errors(log.append): 1 / 0 except ZeroDivisionError: pass try: with log_errors(log.append, 'name check', stack=False): hey except NameError: pass assert len(log) == 2 print(log) assert log[0].startswith('Traceback') assert re.search(r'ZeroDivisionError: .* zero\s*$', log[0]) assert not log[1].startswith('Traceback') assert re.search(r"NameError: (global )?name 'hey' is not defined raised in name check", log[1]) def test_print_errors(): def error(): 1 / 0 f = print_errors(error) assert 
def capture(command, *args, **kwargs):
    """Runs command(*args, **kwargs) and returns everything it printed to stdout."""
    saved = sys.stdout
    sys.stdout = buf = StringIO()
    try:
        command(*args, **kwargs)
        buf.seek(0)
        return buf.read()
    finally:
        # Always restore the real stdout, even if command raised
        sys.stdout = saved
12 def test_decorator_kw_only_args(): @decorator def add(call, **kwargs): # TODO: use real kw-only args in Python 3 return call() + kwargs.get("n", 1) def ten(a, b): return 10 # Should work with or without parentheses assert add(n=2)(ten)(1, 2) == 12 assert add()(ten)(1, 2) == 11 assert add(ten)(1, 2) == 11 def test_decorator_access_arg(): @decorator def multiply(call): return call() * call.n @multiply def square(n): return n assert square(5) == 25 def test_decorator_access_nonexistent_arg(): @decorator def return_x(call): return call.x @return_x def f(): pass with pytest.raises(AttributeError): f() def test_decorator_required_arg(): @decorator def deco(call): call.x @deco def f(x, y=42): pass with pytest.raises(AttributeError): f() def test_double_decorator_defaults(): @decorator def deco(call): return call.y @decorator def noop(call): return call() @deco @noop def f(x, y=1): pass assert f(42) == 1 def test_decorator_defaults(): @decorator def deco(call): return call.y, call.z @deco def f(x, y=1, z=2): pass assert f(42) == (1, 2) def test_decorator_with_method(): @decorator def inc(call): return call() + 1 class A(object): def ten(self): return 10 @classmethod def ten_cls(cls): return 10 @staticmethod def ten_static(): return 10 assert inc(A().ten)() == 11 assert inc(A.ten_cls)() == 11 assert inc(A.ten_static)() == 11 def test_decorator_with_method_descriptor(): @decorator def exclaim(call): return call() + '!' assert exclaim(str.upper)('hi') == 'HI!' 
def test_chain_arg_access(): @decorator def decor(call): return call.x + call() @decor @decor def func(x): return x assert func(2) == 6 def test_meta_attribtes(): @decorator def decor(call): return call() def func(x): "Some doc" return x decorated = decor(func) double_decorated = decor(decorated) assert decorated.__name__ == 'func' assert decorated.__module__ == __name__ assert decorated.__doc__ == "Some doc" assert decorated.__wrapped__ is func assert decorated.__original__ is func assert double_decorated.__wrapped__ is decorated assert double_decorated.__original__ is func def test_decorator_introspection(): @decorator def decor(call, x): return call() assert decor.__name__ == 'decor' decor_x = decor(42) assert decor_x.__name__ == 'decor' assert decor_x._func is decor.__wrapped__ assert decor_x._args == (42,) assert decor_x._kwargs == {} funcy-1.17/tests/test_flow.py000066400000000000000000000131261416001257100163100ustar00rootroot00000000000000from datetime import timedelta import pytest from funcy.flow import * def test_silent(): assert silent(int)(1) == 1 assert silent(int)('1') == 1 assert silent(int)('hello') is None assert silent(str.upper)('hello') == 'HELLO' class MyError(Exception): pass def test_ignore(): assert ignore(Exception)(raiser(Exception))() is None assert ignore(Exception)(raiser(MyError))() is None assert ignore((TypeError, MyError))(raiser(MyError))() is None with pytest.raises(TypeError): ignore(MyError)(raiser(TypeError))() assert ignore(MyError, default=42)(raiser(MyError))() == 42 def test_raiser(): with pytest.raises(Exception) as e: raiser()() assert e.type is Exception with pytest.raises(Exception, match="text") as e: raiser("text")() assert e.type is Exception with pytest.raises(MyError): raiser(MyError)() with pytest.raises(MyError, match="some message"): raiser(MyError('some message'))() with pytest.raises(MyError, match="some message") as e: raiser(MyError, 'some message')() with pytest.raises(MyError): raiser(MyError)('junk', 
keyword='junk') def test_suppress(): with suppress(Exception): raise Exception with suppress(Exception): raise MyError with pytest.raises(TypeError): with suppress(MyError): raise TypeError with suppress(TypeError, MyError): raise MyError def test_reraise(): @reraise((TypeError, ValueError), MyError) def erry(e): raise e with pytest.raises(MyError): erry(TypeError) with pytest.raises(MyError): erry(ValueError) with pytest.raises(MyError): with reraise(ValueError, MyError): raise ValueError with pytest.raises(TypeError): with reraise(ValueError, MyError): raise TypeError with pytest.raises(MyError, match="heyhey"): with reraise(ValueError, lambda e: MyError(str(e) * 2)): raise ValueError("hey") def test_retry(): with pytest.raises(MyError): _make_failing()() assert retry(2, MyError)(_make_failing())() == 1 with pytest.raises(MyError): retry(2, MyError)(_make_failing(n=2))() def test_retry_timeout(monkeypatch): timeouts = [] monkeypatch.setattr('time.sleep', timeouts.append) def failing(): raise MyError # sleep only between tries, so retry is 11, but sleep summary is ~0.1 sec del timeouts[:] with pytest.raises(MyError): retry(11, MyError, timeout=1)(failing)() assert timeouts == [1] * 10 # exponential timeout del timeouts[:] with pytest.raises(MyError): retry(4, MyError, timeout=lambda a: 2 ** a)(failing)() assert timeouts == [1, 2, 4] def test_retry_many_errors(): assert retry(2, (MyError, RuntimeError))(_make_failing())() == 1 assert retry(2, [MyError, RuntimeError])(_make_failing())() == 1 def test_retry_filter(): error_pred = lambda e: 'x' in str(e) retry_deco = retry(2, MyError, filter_errors=error_pred) assert retry_deco(_make_failing(e=MyError('x')))() == 1 with pytest.raises(MyError): retry_deco(_make_failing())() def _make_failing(n=1, e=MyError): calls = [] def failing(): if len(calls) < n: calls.append(1) raise e return 1 return failing def test_fallback(): assert fallback(raiser(), lambda: 1) == 1 with pytest.raises(Exception): fallback((raiser(), 
MyError), lambda: 1) assert fallback((raiser(MyError), MyError), lambda: 1) == 1 def test_limit_error_rate(): calls = [] @limit_error_rate(2, 60, MyError) def limited(x): calls.append(x) raise TypeError with pytest.raises(TypeError): limited(1) with pytest.raises(TypeError): limited(2) with pytest.raises(MyError): limited(3) assert calls == [1, 2] @pytest.mark.parametrize('typ', [pytest.param(int, id='int'), pytest.param(lambda s: timedelta(seconds=s), id='timedelta')]) def test_throttle(monkeypatch, typ): timestamps = iter([0, 0.01, 1, 1.000025]) monkeypatch.setattr('time.time', lambda: next(timestamps)) calls = [] @throttle(typ(1)) def throttled(x): calls.append(x) throttled(1) throttled(2) throttled(3) throttled(4) assert calls == [1, 3] def test_throttle_class(): class A: def foo(self): return 42 a = A() assert throttle(1)(a.foo)() == 42 def test_post_processing(): @post_processing(max) def my_max(l): return l assert my_max([1, 3, 2]) == 3 def test_collecting(): @collecting def doubles(l): for i in l: yield i * 2 assert doubles([1, 2]) == [2, 4] def test_once(): calls = [] @once def call(n): calls.append(n) return n call(1) call(2) assert calls == [1] def test_once_per(): calls = [] @once_per('n') def call(n, x=None): calls.append(n) return n call(1) call(2) call(1, 42) assert calls == [1, 2] def test_once_per_args(): calls = [] @once_per_args def call(n, x=None): calls.append(n) return n call(1) call(2) call(1, 42) assert calls == [1, 2, 1] call(1) assert calls == [1, 2, 1] def test_wrap_with(): calls = [] # Not using @contextmanager to not make this a decorator class Manager: def __enter__(self): calls.append(1) return self def __exit__(self, *args): pass @wrap_with(Manager()) def calc(): pass calc() assert calls == [1] funcy-1.17/tests/test_funcmakers.py000066400000000000000000000017411416001257100174770ustar00rootroot00000000000000from collections import defaultdict import pytest from funcy.funcmakers import * def test_callable(): assert make_func(lambda x: 
x + 42)(0) == 42 def test_int(): assert make_func(0)('abc') == 'a' assert make_func(2)([1,2,3]) == 3 assert make_func(1)({1: 'a'}) == 'a' with pytest.raises(IndexError): make_func(1)('a') with pytest.raises(TypeError): make_func(1)(42) def test_slice(): assert make_func(slice(1, None))('abc') == 'bc' def test_str(): assert make_func(r'\d+')('ab42c') == '42' assert make_func(r'\d+')('abc') is None assert make_pred(r'\d+')('ab42c') is True assert make_pred(r'\d+')('abc') is False def test_dict(): assert make_func({1: 'a'})(1) == 'a' with pytest.raises(KeyError): make_func({1: 'a'})(2) d = defaultdict(int, a=42) assert make_func(d)('a') == 42 assert make_func(d)('b') == 0 def test_set(): s = set([1,2,3]) assert make_func(s)(1) is True assert make_func(s)(4) is False funcy-1.17/tests/test_funcolls.py000066400000000000000000000012071416001257100171630ustar00rootroot00000000000000from whatever import _ from funcy.compat import lfilter from funcy.funcolls import * def test_all_fn(): assert lfilter(all_fn(_ > 3, _ % 2), range(10)) == [5, 7, 9] def test_any_fn(): assert lfilter(any_fn(_ > 3, _ % 2), range(10)) == [1, 3, 4, 5, 6, 7, 8, 9] def test_none_fn(): assert lfilter(none_fn(_ > 3, _ % 2), range(10)) == [0, 2] def test_one_fn(): assert lfilter(one_fn(_ > 3, _ % 2), range(10)) == [1, 3, 4, 6, 8] def test_some_fn(): assert some_fn(_-1, _*0, _+1, _*2)(1) == 2 def test_extended_fns(): f = any_fn(None, set([1,2,0])) assert f(1) assert f(0) assert f(10) assert not f('') funcy-1.17/tests/test_funcs.py000066400000000000000000000125201416001257100164540ustar00rootroot00000000000000from operator import __add__, __sub__ import pytest from whatever import _ from funcy.py2 import map, merge_with from funcy.funcs import * from funcy.seqs import keep def test_caller(): assert caller([1, 2])(sum) == 3 def test_constantly(): assert constantly(42)() == 42 assert constantly(42)('hi', 'there', volume='shout') == 42 def test_partial(): assert partial(__add__, 10)(1) == 11 assert 
partial(__add__, 'abra')('cadabra') == 'abracadabra' merge = lambda a=None, b=None: a + b assert partial(merge, a='abra')(b='cadabra') == 'abracadabra' assert partial(merge, b='abra')(a='cadabra') == 'cadabraabra' def test_func_partial(): class A(object): f = func_partial(lambda x, self: x + 1, 10) assert A().f() == 11 def test_rpartial(): assert rpartial(__sub__, 10)(1) == -9 assert rpartial(pow, 2, 85)(10) == 15 merge = lambda a, b, c='bra': a + b + c assert rpartial(merge, a='abra')(b='cada') == 'abracadabra' assert rpartial(merge, 'cada', c='fancy')('abra', c='funcy') == 'abracadafuncy' def test_curry(): assert curry(lambda: 42)() == 42 assert curry(_ * 2)(21) == 42 assert curry(_ * _)(6)(7) == 42 assert curry(__add__, 2)(10)(1) == 11 assert curry(__add__)(10)(1) == 11 # Introspect builtin assert curry(lambda x,y,z: x+y+z)('a')('b')('c') == 'abc' def test_curry_funcy(): # curry() doesn't handle required star args, # but we can code inspection for funcy utils. assert curry(map)(int)('123') == [1, 2, 3] assert curry(merge_with)(sum)({1: 1}) == {1: 1} def test_rcurry(): assert rcurry(__sub__, 2)(10)(1) == -9 assert rcurry(lambda x,y,z: x+y+z)('a')('b')('c') == 'cba' assert rcurry(str.endswith, 2)('c')('abc') is True def test_autocurry(): at = autocurry(lambda a, b, c: (a, b, c)) assert at(1)(2)(3) == (1, 2, 3) assert at(1, 2)(3) == (1, 2, 3) assert at(1)(2, 3) == (1, 2, 3) assert at(1, 2, 3) == (1, 2, 3) with pytest.raises(TypeError): at(1, 2, 3, 4) with pytest.raises(TypeError): at(1, 2)(3, 4) assert at(a=1, b=2, c=3) == (1, 2, 3) assert at(c=3)(1, 2) == (1, 2, 3) assert at(c=4)(c=3)(1, 2) == (1, 2, 3) with pytest.raises(TypeError): at(a=1)(1, 2, 3) def test_autocurry_named(): at = autocurry(lambda a, b, c=9: (a, b, c)) assert at(1)(2) == (1, 2, 9) assert at(1)(2, 3) == (1, 2, 3) assert at(a=1)(b=2) == (1, 2, 9) assert at(c=3)(1)(2) == (1, 2, 3) assert at(c=3, a=1, b=2) == (1, 2, 3) with pytest.raises(TypeError): at(b=2, c=9, d=42)(1) def test_autocurry_kwargs(): 
at = autocurry(lambda a, b, **kw: (a, b, kw)) assert at(1, 2) == (1, 2, {}) assert at(1)(c=9)(2) == (1, 2, {'c': 9}) assert at(c=9, d=5)(e=7)(1, 2) == (1, 2, {'c': 9, 'd': 5, 'e': 7}) at = autocurry(lambda a, b=2, c=3: (a, b, c)) assert at(1) == (1, 2, 3) assert at(a=1) == (1, 2, 3) assert at(c=9)(1) == (1, 2, 9) assert at(b=3, c=9)(1) == (1, 3, 9) with pytest.raises(TypeError): at(b=2, d=3, e=4)(a=1, c=1) def test_autocurry_builtin(): assert autocurry(complex)(imag=1)(0) == 1j assert autocurry(map)(_ + 1)([1, 2]) == [2, 3] assert autocurry(int)(base=12)('100') == 144 # Only works in newer Pythons, relies on inspect.signature() # assert autocurry(str.split)(sep='_')('a_1') == ['a', '1'] def test_autocurry_hard(): def required_star(f, *seqs): return map(f, *seqs) assert autocurry(required_star)(__add__)('12', 'ab') == ['1a', '2b'] _iter = autocurry(iter) assert list(_iter([1, 2])) == [1, 2] assert list(_iter([0, 1, 2].pop)(0)) == [2, 1] _keep = autocurry(keep) assert list(_keep('01')) == ['0', '1'] assert list(_keep(int)('01')) == [1] with pytest.raises(TypeError): _keep(1, 2, 3) def test_autocurry_class(): class A: def __init__(self, x, y=0): self.x, self.y = x, y assert autocurry(A)(1).__dict__ == {'x': 1, 'y': 0} class B: pass autocurry(B)() class I(int): pass assert autocurry(int)(base=12)('100') == 144 def test_compose(): double = _ * 2 inc = _ + 1 assert compose()(10) == 10 assert compose(double)(10) == 20 assert compose(inc, double)(10) == 21 assert compose(str, inc, double)(10) == '21' assert compose(int, r'\d+')('abc1234xy') == 1234 def test_rcompose(): double = _ * 2 inc = _ + 1 assert rcompose()(10) == 10 assert rcompose(double)(10) == 20 assert rcompose(inc, double)(10) == 22 assert rcompose(double, inc)(10) == 21 def test_complement(): assert complement(identity)(0) is True assert complement(identity)([1, 2]) is False def test_juxt(): assert ljuxt(__add__, __sub__)(10, 2) == [12, 8] assert map(ljuxt(_ + 1, _ - 1), [2, 3]) == [[3, 1], [4, 2]] def 
test_iffy(): assert map(iffy(_ % 2, _ * 2, _ / 2), [1,2,3,4]) == [2,1,6,2] assert map(iffy(_ % 2, _ * 2), [1,2,3,4]) == [2,2,6,4] assert map(iffy(_ * 2), [21, '', None]) == [42, '', None] assert map(iffy(_ % 2, _ * 2, None), [1,2,3,4]) == [2, None, 6, None] assert map(iffy(_ + 1, default=1), [1, None, 2]) == [2, 1, 3] assert map(iffy(set([1,4,5]), _ * 2), [1, 2, 3, 4]) == [2, 2, 3, 8] assert map(iffy(r'\d+', str.upper), ['a2', 'c']) == ['A2', 'c'] assert map(iffy(set([1,4,5])), [False, 2, 4]) == [False, False, True] assert map(iffy(None), [False, 2, 3, 4]) == [False, 2, 3, 4] funcy-1.17/tests/test_interface.py000066400000000000000000000051371416001257100173040ustar00rootroot00000000000000import pkgutil import pytest from funcy.compat import PY2, PY3 import funcy from funcy import py2, py3 from funcy.py3 import cat, lcat, count_reps, is_iter, is_list # Introspect all modules exclude = ('compat', 'cross', '_inspect', 'py2', 'py3', 'simple_funcs', 'funcmakers') module_names = list(name for _, name, _ in pkgutil.iter_modules(funcy.__path__) if name not in exclude) modules = [getattr(funcy, name) for name in module_names] def test_match(): assert funcy.__all__ == (py2 if PY2 else py3).__all__ @pytest.mark.skipif(PY2, reason="modules use python 3 internally") def test_full_py3(): assert sorted(funcy.__all__) == sorted(lcat(m.__all__ for m in modules) + ['lzip']) def test_full(): assert len(py2.__all__) == len(py3.__all__) def test_name_clashes(): counts = count_reps(cat(m.__all__ for m in modules)) clashes = [name for name, c in counts.items() if c > 1] assert not clashes, 'names clash for ' + ', '.join(clashes) def test_renames(): inames = [n for n in py2.__all__ if n.startswith('i')] ipairs = [n[1:] for n in inames if n[1:] in py2.__all__] for name in inames: if name != 'izip': assert name in py3.__all__ or name[1:] in py3.__all__ for name in ipairs: assert name in py3.__all__ assert 'l' + name in py3.__all__ lnames = [n for n in py3.__all__ if n.startswith('l')] 
lpairs = [n[1:] for n in lnames if n[1:] in py3.__all__] for name in lnames: if name != 'lzip': assert name in py2.__all__ or name[1:] in py2.__all__ for name in lpairs: assert name in py2.__all__ assert 'i' + name in py2.__all__ # Only inames a renamed assert set(py2.__all__) - set(py3.__all__) <= set(inames) # Only lnames a new, and zip_values/zip_dicts assert set(py3.__all__) - set(py2.__all__) <= set(lnames) | set(['zip_values', 'zip_dicts']) def test_docs(): exports = [(name, getattr(funcy, name)) for name in funcy.__all__ if name not in ('print_errors', 'print_durations', 'ErrorRateExceeded') and getattr(funcy, name).__module__ not in ('funcy.types', 'funcy.primitives')] # NOTE: we are testing this way and not with all() to immediately get a list of offenders assert [name for name, f in exports if f.__name__ in ('', '_decorator')] == [] assert [name for name, f in exports if f.__doc__ is None] == [] def test_list_iter(): assert is_list(py2.map(None, [])) assert is_iter(py3.map(None, [])) assert is_list(funcy.map(None, [])) == PY2 assert is_iter(funcy.map(None, [])) == PY3 funcy-1.17/tests/test_objects.py000066400000000000000000000055471416001257100170020ustar00rootroot00000000000000import sys import pytest from funcy.objects import * from funcy import suppress ### @cached_property def test_cached_property(): calls = [0] class A(object): @cached_property def prop(self): calls[0] += 1 return 7 a = A() assert a.prop == 7 assert a.prop == 7 assert calls == [1] a.prop = 42 assert a.prop == 42 del a.prop assert a.prop == 7 assert calls == [2] def test_cached_property_doc(): class A(object): @cached_property def prop(self): "prop doc" return 7 assert A.prop.__doc__ == "prop doc" def test_cached_readonly(): class A(object): @cached_readonly def prop(self): return 7 a = A() assert a.prop == 7 with pytest.raises(AttributeError): a.prop = 8 def test_wrap_prop(): calls = [] # Not using @contextmanager to not make this a decorator class Manager: def __init__(self, name): 
self.name = name def __enter__(self): calls.append(self.name) return self def __exit__(self, *args): pass class A(object): @wrap_prop(Manager('p')) @property def prop(self): return 1 @wrap_prop(Manager('cp')) @cached_property def cached_prop(self): return 1 a = A() assert a.prop and calls == ['p'] assert a.prop and calls == ['p', 'p'] assert a.cached_prop and calls == ['p', 'p', 'cp'] assert a.cached_prop and calls == ['p', 'p', 'cp'] # Wrap __set__ for data props a = A() calls[:] = [] with suppress(AttributeError): a.prop = 2 assert calls == ['p'] # Do not wrap __set__ for non-data props a.cached_property = 2 assert calls == ['p'] ### Monkey tests def test_monkey(): class A(object): def f(self): return 7 @monkey(A) def f(self): return f.original(self) * 6 assert A().f() == 42 def test_monkey_with_name(): class A(object): def f(self): return 7 @monkey(A, name='f') def g(self): return g.original(self) * 6 assert A().f() == 42 def test_monkey_property(): class A(object): pass @monkey(A) @property def prop(self): return 42 assert A().prop == 42 def f(x): return x def test_monkey_module(): this_module = sys.modules[__name__] @monkey(this_module) def f(x): return f.original(x) * 2 assert f(21) == 42 def test_namespace(): class tests(namespace): is_int = lambda x: isinstance(x, int) tests.is_int(10) def test_lazy_object(): class A(object): x = 42 def __init__(self): log.append('init') log = [] a = LazyObject(A) assert not log assert a.x == 42 funcy-1.17/tests/test_seqs.py000066400000000000000000000147361416001257100163240ustar00rootroot00000000000000from operator import add import pytest from whatever import _ from funcy.compat import range, Iterator, PY3 from funcy import is_list from funcy.seqs import * def test_repeatedly(): counter = count() c = lambda: next(counter) assert take(2, repeatedly(c)) == [0, 1] def test_iterate(): assert take(4, iterate(_ * 2, 1)) == [1, 2, 4, 8] def test_take(): assert take(2, [3, 2, 1]) == [3, 2] assert take(2, count(7)) == [7, 8] def 
test_drop(): dropped = drop(2, [5, 4, 3, 2]) assert isinstance(dropped, Iterator) assert list(dropped) == [3, 2] assert take(2, drop(2, count())) == [2, 3] def test_first(): assert first('xyz') == 'x' assert first(count(7)) == 7 assert first([]) is None def test_second(): assert second('xyz') == 'y' assert second(count(7)) == 8 assert second('x') is None def test_last(): assert last('xyz') == 'z' assert last(range(1, 10)) == 9 assert last([]) is None assert last(x for x in 'xyz') == 'z' def test_nth(): assert nth(0, 'xyz') == 'x' assert nth(2, 'xyz') == 'z' assert nth(3, 'xyz') is None assert nth(3, count(7)) == 10 def test_butlast(): assert list(butlast('xyz')) == ['x', 'y'] assert list(butlast([])) == [] def test_ilen(): assert ilen('xyz') == 3 assert ilen(range(10)) == 10 def test_lmap(): assert lmap(_ * 2, [2, 3]) == [4, 6] assert lmap(None, [2, 3]) == [2, 3] assert lmap(_ + _, [1, 2], [4, 5]) == [5, 7] assert lmap(r'\d+', ['a2', '13b']) == ['2', '13'] assert lmap({'a': 1, 'b': 2}, 'ab') == [1, 2] assert lmap(set([1,2,3]), [0, 1, 2]) == [False, True, True] assert lmap(1, ['abc', '123']) == ['b', '2'] assert lmap(slice(2), ['abc', '123']) == ['ab', '12'] @pytest.mark.skipif(PY3, reason="map(None, ...) doesn't work in python 3") def test_map_multi(): assert lmap(None, [1, 2, 3], 'abc') == [(1, 'a'), (2, 'b'), (3, 'c')] @pytest.mark.skipif(PY3, reason="imap(None, ...) doesn't work in python 3") def test_imap_strange(): """ Demonstrates funcy.imap and itertools.imap have behavior when given None as f. 
""" assert list(map(None, 'abc')) == [('a',), ('b',), ('c',)] def test_filter(): assert lfilter(None, [2, 3, 0]) == [2, 3] assert lfilter(r'\d+', ['a2', '13b', 'c']) == ['a2', '13b'] assert lfilter(set([1,2,3]), [0, 1, 2, 4, 1]) == [1, 2, 1] def test_remove(): assert lremove(_ > 3, range(10)) == [0, 1, 2, 3] assert lremove('^a', ['a', 'b', 'ba']) == ['b', 'ba'] def test_keep(): assert lkeep(_ % 3, range(5)) == [1, 2, 1] assert lkeep(range(5)) == [1, 2, 3, 4] assert lkeep(mapcat(range, range(4))) == [1, 1, 2] def test_concat(): assert lconcat('ab', 'cd') == list('abcd') assert lconcat() == [] def test_cat(): assert lcat('abcd') == list('abcd') assert lcat(range(x) for x in range(3)) == [0, 0, 1] def test_flatten(): assert lflatten([1, [2, 3]]) == [1, 2, 3] assert lflatten([[1, 2], 3]) == [1, 2, 3] assert lflatten([(2, 3)]) == [2, 3] assert lflatten([iter([2, 3])]) == [2, 3] def test_flatten_follow(): assert lflatten([1, [2, 3]], follow=is_list) == [1, 2, 3] assert lflatten([1, [(2, 3)]], follow=is_list) == [1, (2, 3)] def test_mapcat(): assert lmapcat(lambda x: [x, x], 'abc') == list('aabbcc') def test_interleave(): assert list(interleave('ab', 'cd')) == list('acbd') assert list(interleave('ab_', 'cd')) == list('acbd') def test_iterpose(): assert list(interpose('.', 'abc')) == list('a.b.c') def test_takewhile(): assert list(takewhile([1, 2, None, 3])) == [1, 2] def test_distinct(): assert ldistinct('abcbad') == list('abcd') assert ldistinct([{}, {}, {'a': 1}, {'b': 2}], key=len) == [{}, {'a': 1}] assert ldistinct(['ab', 'cb', 'ad'], key=0) == ['ab', 'cb'] # Separate test as lsplit() is not implemented via it. 
def test_split(): assert lmap(list, split(_ % 2, range(5))) == [[1, 3], [0, 2, 4]] def test_lsplit(): assert lsplit(_ % 2, range(5)) == ([1, 3], [0, 2, 4]) # This behaviour moved to split_at() with pytest.raises(TypeError): lsplit(2, range(5)) def test_split_at(): assert lsplit_at(2, range(5)) == ([0, 1], [2, 3, 4]) def test_split_by(): assert lsplit_by(_ % 2, [1, 2, 3]) == ([1], [2, 3]) def test_group_by(): assert group_by(_ % 2, range(5)) == {0: [0, 2, 4], 1: [1, 3]} assert group_by(r'\d', ['a1', 'b2', 'c1']) == {'1': ['a1', 'c1'], '2': ['b2']} def test_group_by_keys(): assert group_by_keys(r'(\d)(\d)', ['12', '23']) == {'1': ['12'], '2': ['12', '23'], '3': ['23']} def test_group_values(): assert group_values(['ab', 'ac', 'ba']) == {'a': ['b', 'c'], 'b': ['a']} def test_count_by(): assert count_by(_ % 2, range(5)) == {0: 3, 1: 2} assert count_by(r'\d', ['a1', 'b2', 'c1']) == {'1': 2, '2': 1} def test_count_by_is_defaultdict(): cnts = count_by(len, []) assert cnts[1] == 0 def test_count_reps(): assert count_reps([0, 1, 0]) == {0: 2, 1: 1} def test_partition(): assert lpartition(2, [0, 1, 2, 3, 4]) == [[0, 1], [2, 3]] assert lpartition(2, 1, [0, 1, 2, 3]) == [[0, 1], [1, 2], [2, 3]] # test iters assert lpartition(2, iter(range(5))) == [[0, 1], [2, 3]] assert lmap(list, lpartition(2, range(5))) == [[0, 1], [2, 3]] def test_chunks(): assert lchunks(2, [0, 1, 2, 3, 4]) == [[0, 1], [2, 3], [4]] assert lchunks(2, 1, [0, 1, 2, 3]) == [[0, 1], [1, 2], [2, 3], [3]] assert lchunks(3, 1, iter(range(3))) == [[0, 1, 2], [1, 2], [2]] def test_partition_by(): assert lpartition_by(lambda x: x == 3, [1,2,3,4,5]) == [[1,2], [3], [4,5]] assert lpartition_by('x', 'abxcd') == [['a', 'b'], ['x'], ['c', 'd']] assert lpartition_by(r'\d', '1211') == [['1'], ['2'], ['1','1']] def test_with_prev(): assert list(with_prev(range(3))) == [(0, None), (1, 0), (2, 1)] def test_with_next(): assert list(with_next(range(3))) == [(0, 1), (1, 2), (2, None)] def test_pairwise(): assert 
list(pairwise(range(3))) == [(0, 1), (1, 2)] def test_reductions(): assert lreductions(add, []) == [] assert lreductions(add, [None]) == [None] assert lreductions(add, [1, 2, 3, 4]) == [1, 3, 6, 10] assert lreductions(lambda x, y: x + [y], [1,2,3], []) == [[1], [1, 2], [1, 2, 3]] def test_sums(): assert lsums([]) == [] assert lsums([1, 2, 3, 4]) == [1, 3, 6, 10] assert lsums([[1],[2],[3]]) == [[1], [1, 2], [1, 2, 3]] def test_without(): assert lwithout([]) == [] assert lwithout([1, 2, 3, 4]) == [1, 2, 3, 4] assert lwithout([1, 2, 1, 0, 3, 1, 4], 0, 1) == [2, 3, 4] funcy-1.17/tests/test_strings.py000066400000000000000000000017261416001257100170350ustar00rootroot00000000000000from funcy.strings import * def test_re_find(): assert re_find(r'\d+', 'x34y12') == '34' assert re_find(r'y(\d+)', 'x34y12') == '12' assert re_find(r'([a-z]+)(\d+)', 'x34y12') == ('x', '34') assert re_find(r'(?P[a-z]+)(?P\d+)', 'x34y12') == {'l': 'x', 'd': '34'} def test_re_all(): assert re_all(r'\d+', 'x34y12') == ['34', '12'] assert re_all(r'([a-z]+)(\d+)', 'x34y12') == [('x', '34'), ('y', '12')] assert re_all(r'(?P[a-z]+)(?P\d+)', 'x34y12') \ == [{'l': 'x', 'd': '34'}, {'l': 'y', 'd': '12'}] def test_str_join(): assert str_join([1, 2, 3]) == '123' assert str_join('_', [1, 2, 3]) == '1_2_3' assert isinstance(str_join(u'_', [1, 2, 3]), type(u'')) def test_cut_prefix(): assert cut_prefix('name:alex', 'name:') == 'alex' assert cut_prefix('alex', 'name:') == 'alex' def test_cut_suffix(): assert cut_suffix('name.py', '.py') == 'name' assert cut_suffix('name', '.py') == 'name' funcy-1.17/tests/test_tree.py000066400000000000000000000011531416001257100162750ustar00rootroot00000000000000from whatever import _ from funcy import rest from funcy.tree import * def test_tree_leaves(): assert ltree_leaves([1, 2, [3, [4]], 5]) == [1, 2, 3, 4, 5] assert ltree_leaves(1) == [1] assert ltree_leaves(3, follow=_ > 1, children=range) == [0, 1, 0, 1] assert ltree_leaves([1, [2, [3, 4], 5], 6], children=rest) == [4, 
5, 6] def test_tree_nodes(): assert ltree_nodes([1, 2, [3, [4]], 5]) == [ [1, 2, [3, [4]], 5], 1, 2, [3, [4]], 3, [4], 4, 5 ] assert ltree_nodes(1) == [1] assert ltree_nodes(3, follow=_ > 1, children=range) == [3, 0, 1, 2, 0, 1] funcy-1.17/tests/test_types.py000066400000000000000000000006701416001257100165050ustar00rootroot00000000000000from funcy.compat import range from funcy.types import * def test_iterable(): assert iterable([]) assert iterable({}) assert iterable('abc') assert iterable(iter([])) assert iterable(x for x in range(10)) assert iterable(range(10)) assert not iterable(1) def test_is_iter(): assert is_iter(iter([])) assert is_iter(x for x in range(10)) assert not is_iter([]) assert not is_iter(range(10)) funcy-1.17/tox.ini000066400000000000000000000012041416001257100140730ustar00rootroot00000000000000[tox] envlist = py27, py34, py35, py36, py37, py38, py39, py310, pypy, pypy3, docs, lint [testenv] deps = -r test_requirements.txt commands = py.test -W error {posargs} [testenv:docs] deps = -r docs/requirements.txt changedir = docs commands = sphinx-build -b html -W . _build/html ; TODO: get rid of flakes [flake8] max-line-length = 100 ignore = E127,E128,E302,F403,E126,E272,E226,E301,E261,E265,E251,E303,E305,E306,E266,E731,E402,F405,W503 exclude = docs/conf.py, .tox [testenv:lint] basepython = python3.8 passenv = PYTHONPATH deps = flake8>=3.8.3 commands = python --version flake8 funcy flake8 --select=F,E5,W tests